Compare commits

..

56 Commits

Author SHA1 Message Date
nolash
14850d06dd Omit version code in cov 2021-07-09 15:57:14 +02:00
nolash
1e602e1352 Add remaning query tests 2021-07-09 15:49:52 +02:00
nolash
167ff0f5e4 Make process tx pass 2021-07-09 15:26:11 +02:00
nolash
e166a6737f Add query test 2021-07-09 14:56:41 +02:00
nolash
31d700efe3 Add missing transferauth module 2021-07-09 11:38:56 +02:00
ffd989e7da update dockerfile for cic-eth 2021-07-08 15:14:15 -07:00
991d13b9c2 Merge branch 'bvander/cic-eth-base-image-and-testing' of gitlab.com:grassrootseconomics/cic-internal-integration into bvander/cic-eth-base-image-and-testing 2021-07-08 15:05:49 -07:00
480ed5b119 Merge branch 'master' into bvander/cic-eth-base-image-and-testing 2021-07-08 15:05:29 -07:00
nolash
fdf7a3a503 Omit migrations folder in coverage 2021-07-08 13:32:15 +02:00
nolash
e3079c3930 Remove package init files from runnables in cic-cache 2021-07-08 13:25:14 +02:00
e6fa450a2a put contract migration and data seeding in local context 2021-07-07 16:49:39 -07:00
9a7ccc3228 fixed the eth build 2021-07-07 16:39:05 -07:00
7fc99afe1b ussd pypi args 2021-07-07 16:24:29 -07:00
5a55d76faf fix eth kaniko args 2021-07-07 16:20:48 -07:00
74eb5df1bb fix mr to send image to right the place and can we do kaniko target arg as a variable?? 2021-07-07 16:15:42 -07:00
d9b25c1063 add notify and we done 2021-07-07 16:07:50 -07:00
319e587f29 cic ussd refactor 2021-07-07 16:00:11 -07:00
1bfc736513 do the dumb ci dockerfile 2021-07-07 15:49:00 -07:00
80d7693532 do cic-cache 2021-07-07 15:47:42 -07:00
2f36e3496e add another requirements 2021-07-07 15:18:28 -07:00
de00762c2c ok maybe this is multiline 2021-07-07 15:10:44 -07:00
a71ef0a388 is this a multiline? 2021-07-07 15:05:27 -07:00
23cf75a4c9 multipass err...multiREQUIREMENTS 2021-07-07 14:58:29 -07:00
de36fe3fab multipass err...multiline 2021-07-07 14:33:32 -07:00
deeba250e4 fix eth and meta tests 2021-07-07 14:24:26 -07:00
5668ed1cf5 Merge branch 'bvander/cic-eth-base-image-and-testing' of gitlab.com:grassrootseconomics/cic-internal-integration into bvander/cic-eth-base-image-and-testing 2021-07-07 14:08:26 -07:00
8b7a26abb9 Merge branch 'master' into bvander/cic-eth-base-image-and-testing 2021-07-07 14:05:39 -07:00
e40f15d106 Update apps/contract-migration/.gitlab-ci.yml, apps/data-seeding/.gitlab-ci.yml files 2021-07-06 20:41:57 +00:00
b94116a683 Update apps/cic-meta/.gitlab-ci.yml 2021-07-06 20:31:12 +00:00
635ef775df Update apps/cic-meta/.gitlab-ci.yml 2021-07-06 20:30:12 +00:00
c35e230e9f Update apps/cic-notify/.gitlab-ci.yml 2021-07-06 20:29:06 +00:00
0c842cf5d7 Update apps/cic-ussd/.gitlab-ci.yml 2021-07-06 20:28:29 +00:00
e8f65dcd29 Update apps/contract-migration/.gitlab-ci.yml, apps/data-seeding/.gitlab-ci.yml, .gitlab-ci.yml, apps/cic-cache/.gitlab-ci.yml, apps/cic-eth/.gitlab-ci.yml, apps/cic-meta/.gitlab-ci.yml, apps/cic-notify/.gitlab-ci.yml, apps/cic-ussd/.gitlab-ci.yml files 2021-07-06 20:27:32 +00:00
d679a8441e Update .gitlab-ci.yml, apps/data-seeding/.gitlab-ci.yml, apps/cic-ussd/.gitlab-ci.yml, apps/cic-notify/.gitlab-ci.yml, apps/cic-meta/.gitlab-ci.yml, apps/cic-cache/.gitlab-ci.yml files 2021-07-06 20:05:18 +00:00
58f3a90206 Update apps/contract-migration/.gitlab-ci.yml, apps/cic-eth/.gitlab-ci.yml files 2021-07-06 19:54:36 +00:00
3256f23121 Update apps/cic-eth/docker/Dockerfile 2021-07-06 19:52:55 +00:00
8cdd405122 Update apps/contract-migration/docker/Dockerfile, apps/cic-eth/.gitlab-ci.yml, ci_templates/.cic-template.yml, apps/contract-migration/.gitlab-ci.yml files 2021-07-06 19:51:53 +00:00
db4eb31de0 Update apps/contract-migration/docker/Dockerfile 2021-07-06 19:43:44 +00:00
0fa7b99a15 Update apps/contract-migration/.gitlab-ci.yml 2021-07-06 19:42:42 +00:00
b3b051d3d5 Update apps/contract-migration/.gitlab-ci.yml, apps/contract-migration/docker/Dockerfile files 2021-07-06 19:41:24 +00:00
b86b57bcb7 Update apps/contract-migration/docker/Dockerfile 2021-07-06 19:40:36 +00:00
4ecf4f1214 Update ci_templates/.cic-template.yml, apps/cic-eth/.gitlab-ci.yml, .gitlab-ci.yml, apps/contract-migration/.gitlab-ci.yml files 2021-07-06 19:36:17 +00:00
e3da256e66 Update apps/cic-eth/.gitlab-ci.yml 2021-07-06 19:11:51 +00:00
c8e24c96c5 Update apps/cic-eth/.gitlab-ci.yml 2021-07-06 19:10:49 +00:00
ecc5d26666 Update apps/cic-eth/.gitlab-ci.yml 2021-07-06 19:00:09 +00:00
304550c2ae Update apps/cic-eth/.gitlab-ci.yml 2021-07-06 18:53:04 +00:00
93f0353808 added pythonpath 2021-07-06 11:43:59 -07:00
2e6e80e134 Update ci_templates/.cic-template.yml, apps/cic-eth/.gitlab-ci.yml files 2021-07-06 18:12:32 +00:00
bc868fd2c8 add syntax line 2021-07-06 11:06:21 -07:00
0b3b1ef459 change cic-eth context 2021-07-06 10:57:20 -07:00
f72d577b8c Update docker-compose.yml, apps/cic-eth/.gitlab-ci.yml, ci_templates/.cic-template.yml files 2021-07-06 17:30:12 +00:00
068d1112d2 Update ci_templates/.cic-template.yml, apps/cic-eth/.gitlab-ci.yml files 2021-07-06 17:15:24 +00:00
460a7c223b Update apps/cic-eth/docker/Dockerfile, apps/cic-eth/docker/Dockerfile_ci files 2021-07-06 17:11:26 +00:00
dd3ddfd41d Update apps/cic-eth/docker/Dockerfile_ci, apps/cic-eth/.gitlab-ci.yml files 2021-07-06 17:09:32 +00:00
8a6a2fd008 Update ci_templates/.cic-template.yml 2021-07-06 17:06:11 +00:00
3fca9a6744 Update .gitlab-ci.yml, ci_templates/.cic-template.yml, apps/cic-eth/docker/Dockerfile, apps/cic-eth/.gitlab-ci.yml files 2021-07-06 17:02:56 +00:00
107 changed files with 8811 additions and 1352 deletions

View File

@@ -36,7 +36,7 @@ test-mr-cic-cache:
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes:
- apps/$APP_NAME/**/*
- apps/cic-eth/**/*
when: always
build-push-cic-cache:

View File

@@ -100,4 +100,3 @@ class SessionBase(Model):
logg.debug('destroying session {}'.format(session_key))
session.commit()
session.close()
del SessionBase.localsessions[session_key]

View File

@@ -1,13 +1,13 @@
cic-base~=0.2.0a4
cic-base==0.1.3a3+build.984b5cff
alembic==1.4.2
confini>=0.3.6rc3,<0.5.0
confini~=0.3.6rc3
uwsgi==2.0.19.1
moolb~=0.1.0
cic-eth-registry~=0.5.6a2
cic-eth-registry~=0.5.6a1
SQLAlchemy==1.3.20
semver==2.13.0
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
chainsyncer[sql]~=0.0.3a5
erc20-faucet~=0.2.2a2
chainsyncer[sql]~=0.0.3a3
erc20-faucet~=0.2.2a1

View File

@@ -1,5 +1,5 @@
SQLAlchemy==1.3.20
cic-eth-registry>=0.5.6a2,<0.6.0
cic-eth-registry~=0.5.6a1
hexathon~=0.0.1a7
chainqueue>=0.0.3a1,<0.1.0
eth-erc20>=0.0.10a3,<0.1.0
chainqueue~=0.0.2b5
eth-erc20==0.0.10a2

View File

@@ -6,11 +6,6 @@ import logging
import celery
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.chain import ChainSpec
from hexathon import (
add_0x,
strip_0x,
uniform as hex_uniform,
)
# local imports
from cic_eth.db.enum import LockEnum
@@ -24,12 +19,6 @@ from cic_eth.error import LockedError
celery_app = celery.current_app
logg = logging.getLogger()
def normalize_address(a):
if a == None:
return None
return add_0x(hex_uniform(strip_0x(a)))
@celery_app.task(base=CriticalSQLAlchemyTask)
def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL, tx_hash=None):
"""Task wrapper to set arbitrary locks
@@ -43,7 +32,6 @@ def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.AL
:returns: New lock state for address
:rtype: number
"""
address = normalize_address(address)
chain_str = '::'
if chain_spec_dict != None:
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
@@ -65,7 +53,6 @@ def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.
:returns: New lock state for address
:rtype: number
"""
address = normalize_address(address)
chain_str = '::'
if chain_spec_dict != None:
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
@@ -85,7 +72,6 @@ def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None
:returns: New lock state for address
:rtype: number
"""
address = normalize_address(address)
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
r = Lock.set(chain_str, LockEnum.SEND, address=address, tx_hash=tx_hash)
logg.debug('Send locked for {}, flag now {}'.format(address, r))
@@ -103,7 +89,6 @@ def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
:returns: New lock state for address
:rtype: number
"""
address = normalize_address(address)
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
r = Lock.reset(chain_str, LockEnum.SEND, address=address)
logg.debug('Send unlocked for {}, flag now {}'.format(address, r))
@@ -121,7 +106,6 @@ def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=Non
:returns: New lock state for address
:rtype: number
"""
address = normalize_address(address)
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
r = Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
logg.debug('Queue direct locked for {}, flag now {}'.format(address, r))
@@ -139,7 +123,6 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
:returns: New lock state for address
:rtype: number
"""
address = normalize_address(address)
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
r = Lock.reset(chain_str, LockEnum.QUEUE, address=address)
logg.debug('Queue direct unlocked for {}, flag now {}'.format(address, r))
@@ -148,7 +131,6 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
@celery_app.task(base=CriticalSQLAlchemyTask)
def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
address = normalize_address(address)
chain_str = '::'
if chain_spec_dict != None:
chain_str = str(ChainSpec.from_dict(chain_spec_dict))

View File

@@ -14,11 +14,7 @@ from chainqueue.sql.query import get_tx
from chainqueue.sql.state import set_cancel
from chainqueue.db.models.otx import Otx
from chainqueue.db.models.tx import TxCache
from hexathon import (
strip_0x,
add_0x,
uniform as hex_uniform,
)
from hexathon import strip_0x
from potaahto.symbols import snake_and_camel
# local imports
@@ -73,17 +69,15 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
set_cancel(chain_spec, strip_0x(tx['hash']), manual=True, session=session)
query_address = add_0x(hex_uniform(strip_0x(address))) # aaaaargh
q = session.query(Otx)
q = q.join(TxCache)
q = q.filter(TxCache.sender==query_address)
q = q.filter(TxCache.sender==address)
q = q.filter(Otx.nonce>=nonce+delta)
q = q.order_by(Otx.nonce.asc())
otxs = q.all()
tx_hashes = []
txs = []
gas_total = 0
for otx in otxs:
tx_raw = bytes.fromhex(strip_0x(otx.signed_tx))
tx_new = unpack(tx_raw, chain_spec)
@@ -95,10 +89,8 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
tx_new['gas_price'] += 1
tx_new['gasPrice'] = tx_new['gas_price']
tx_new['nonce'] -= delta
gas_total += tx_new['gas_price'] * tx_new['gas']
logg.debug('tx_new {}'.format(tx_new))
logg.debug('gas running total {}'.format(gas_total))
del(tx_new['hash'])
del(tx_new['hash_unsigned'])
@@ -130,10 +122,8 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
s = create_check_gas_task(
txs,
chain_spec,
#tx_new['from'],
address,
#gas=tx_new['gas'],
gas=gas_total,
tx_new['from'],
gas=tx_new['gas'],
tx_hashes_hex=tx_hashes,
queue=queue,
)
@@ -142,8 +132,7 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
'cic_eth.admin.ctrl.unlock_send',
[
chain_spec.asdict(),
address,
#tx_new['from'],
tx_new['from'],
],
queue=queue,
)
@@ -151,8 +140,7 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
'cic_eth.admin.ctrl.unlock_queue',
[
chain_spec.asdict(),
address,
#tx_new['from'],
tx_new['from'],
],
queue=queue,
)

View File

@@ -21,7 +21,6 @@ from chainlib.hash import keccak256_hex_to_hex
from hexathon import (
strip_0x,
add_0x,
uniform as hex_uniform,
)
from chainlib.eth.gas import balance
from chainqueue.db.enum import (
@@ -308,8 +307,6 @@ class AdminApi:
:param address: Ethereum address to return transactions for
:type address: str, 0x-hex
"""
address = add_0x(hex_uniform(strip_0x(address)))
last_nonce = -1
s = celery.signature(
'cic_eth.queue.query.get_account_tx',

View File

@@ -8,8 +8,7 @@ Create Date: 2021-04-02 18:30:55.398388
from alembic import op
import sqlalchemy as sa
#from chainqueue.db.migrations.sqlalchemy import (
from chainqueue.db.migrations.default.export import (
from chainqueue.db.migrations.sqlalchemy import (
chainqueue_upgrade,
chainqueue_downgrade,
)

View File

@@ -8,8 +8,7 @@ Create Date: 2021-04-02 18:36:44.459603
from alembic import op
import sqlalchemy as sa
#from chainsyncer.db.migrations.sqlalchemy import (
from chainsyncer.db.migrations.default.export import (
from chainsyncer.db.migrations.sqlalchemy import (
chainsyncer_upgrade,
chainsyncer_downgrade,
)

View File

@@ -126,4 +126,3 @@ class SessionBase(Model):
logg.debug('commit and destroy session {}'.format(session_key))
session.commit()
session.close()
del SessionBase.localsessions[session_key]

View File

@@ -23,7 +23,7 @@ from chainlib.error import JSONRPCException
from eth_accounts_index.registry import AccountRegistry
from eth_accounts_index import AccountsIndex
from sarafu_faucet import MinterFaucet
from chainqueue.sql.tx import cache_tx_dict
from chainqueue.db.models.tx import TxCache
# local import
from cic_eth_registry import CICRegistry
@@ -300,17 +300,20 @@ def cache_gift_data(
session = self.create_session()
tx_dict = {
'hash': tx_hash_hex,
'from': tx['from'],
'to': tx['to'],
'source_token': ZERO_ADDRESS,
'destination_token': ZERO_ADDRESS,
'from_value': 0,
'to_value': 0,
}
tx_cache = TxCache(
tx_hash_hex,
tx['from'],
tx['to'],
ZERO_ADDRESS,
ZERO_ADDRESS,
0,
0,
session=session,
)
(tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
session.add(tx_cache)
session.commit()
cache_id = tx_cache.id
session.close()
return (tx_hash_hex, cache_id)
@@ -339,15 +342,18 @@ def cache_account_data(
tx_data = AccountsIndex.parse_add_request(tx['data'])
session = SessionBase.create_session()
tx_dict = {
'hash': tx_hash_hex,
'from': tx['from'],
'to': tx['to'],
'source_token': ZERO_ADDRESS,
'destination_token': ZERO_ADDRESS,
'from_value': 0,
'to_value': 0,
}
(tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
tx_cache = TxCache(
tx_hash_hex,
tx['from'],
tx['to'],
ZERO_ADDRESS,
ZERO_ADDRESS,
0,
0,
session=session,
)
session.add(tx_cache)
session.commit()
cache_id = tx_cache.id
session.close()
return (tx_hash_hex, cache_id)

View File

@@ -0,0 +1,385 @@
# standard imports
import os
import logging
# third-party imports
import celery
import web3
from cic_registry import CICRegistry
from cic_registry.chain import ChainSpec
# local imports
from cic_eth.db import SessionBase
from cic_eth.db.models.convert import TxConvertTransfer
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache
from cic_eth.eth.task import sign_and_register_tx
from cic_eth.eth.task import create_check_gas_and_send_task
from cic_eth.eth.token import TokenTxFactory
from cic_eth.eth.factory import TxFactory
from cic_eth.eth.util import unpack_signed_raw_tx
from cic_eth.eth.rpc import RpcClient
celery_app = celery.current_app
#logg = celery_app.log.get_default_logger()
logg = logging.getLogger()
contract_function_signatures = {
'convert': 'f3898a97',
'convert2': '569706eb',
}
class BancorTxFactory(TxFactory):
    """Builds unsigned transactions against the Bancor network contract."""

    def convert(
            self,
            source_token_address,
            destination_token_address,
            reserve_address,
            source_amount,
            minimum_return,
            chain_spec,
            fee_beneficiary='0x0000000000000000000000000000000000000000',
            fee_ppm=0,
            ):
        """Create a BancorNetwork "convert" transaction.

        :param source_token_address: ERC20 contract address of the token converted from
        :type source_token_address: str, 0x-hex
        :param destination_token_address: ERC20 contract address of the token converted to
        :type destination_token_address: str, 0x-hex
        :param reserve_address: ERC20 contract address of the common reserve token
        :type reserve_address: str, 0x-hex
        :param source_amount: Amount of source tokens to convert
        :type source_amount: int
        :param minimum_return: Minimum amount of destination tokens to accept as conversion result
        :type minimum_return: int
        :param chain_spec: Chain spec of the target chain
        :type chain_spec: cic_registry.chain.ChainSpec
        :param fee_beneficiary: Address receiving the affiliate fee, if any
        :type fee_beneficiary: str, 0x-hex
        :param fee_ppm: Affiliate fee in parts per million
        :type fee_ppm: int
        :return: Unsigned "convert" transaction in standard Ethereum format
        :rtype: dict
        """
        contract = CICRegistry.get_contract(chain_spec, 'BancorNetwork')
        gas_budget = contract.gas('convert')
        # five-element path expected by convert2; first/last entries are the
        # source and destination tokens, middle entry the reserve token
        conversion_path = [
            source_token_address,
            source_token_address,
            reserve_address,
            destination_token_address,
            destination_token_address,
        ]
        builder = contract.contract.functions.convert2(
            conversion_path,
            source_amount,
            minimum_return,
            fee_beneficiary,
            fee_ppm,
        )
        tx_params = {
            'from': self.address,
            'gas': gas_budget,
            'gasPrice': self.gas_price,
            'chainId': chain_spec.chain_id(),
            'nonce': self.next_nonce(),
        }
        return builder.buildTransaction(tx_params)
def unpack_convert(data):
    """Decode the calldata of a BancorNetwork convert2 call.

    :param data: 0x-prefixed hex string of transaction input data
    :type data: str
    :raises ValueError: if the function selector does not match convert2
    :return: Decoded call fields (amount, min_return, tokens, fee info)
    :rtype: dict
    """
    selector = data[2:10]
    if selector != contract_function_signatures['convert2']:
        raise ValueError('Invalid convert data ({})'.format(selector))

    args = data[10:]
    # the conversion path array payload starts 384 hex chars into the
    # arguments; the first and last 32-byte words hold the source and
    # destination token addresses (last 40 hex chars of each word)
    path = args[24:64]
    source_hex = path
    destination_hex = args[10 - 10:]  # placeholder, replaced below
    path_words = args[384:]
    source_hex = path_words[24:64]
    destination_hex = path_words[-40:]

    return {
        'amount': int(args[64:128], 16),
        'min_return': int(args[128:192], 16),
        'source_token': web3.Web3.toChecksumAddress('0x' + source_hex),
        'destination_token': web3.Web3.toChecksumAddress('0x' + destination_hex),
        'fee_recipient': args[192:256],
        'fee': int(args[256:320], 16),
    }
# Kept for historical reference, it unpacks a convert call without fee parameters
#def _unpack_convert_mint(data):
# f = data[2:10]
# if f != contract_function_signatures['convert2']:
# raise ValueError('Invalid convert data ({})'.format(f))
#
# d = data[10:]
# path = d[256:]
# source = path[64-40:64]
# destination = path[-40:]
#
# amount = int(d[64:128], 16)
# min_return = int(d[128:192], 16)
# return {
# 'amount': amount,
# 'min_return': min_return,
# 'source_token': web3.Web3.toChecksumAddress('0x' + source),
# 'destination_token': web3.Web3.toChecksumAddress('0x' + destination),
# }
@celery_app.task(bind=True)
def convert_with_default_reserve(self, tokens, from_address, source_amount, minimum_return, to_address, chain_str):
    """Performs a conversion between two liquid tokens using Bancor network.

    Builds and queues three transactions: an allowance reset to zero, an
    allowance approval for the source amount, and the convert call itself.

    :param tokens: Token pair, source and destination respectively
    :type tokens: list of str, 0x-hex
    :param from_address: Ethereum address of sender
    :type from_address: str, 0x-hex
    :param source_amount: Amount of source tokens to convert
    :type source_amount: int
    :param minimum_return: Minimum amount of destination tokens to receive
    :type minimum_return: int
    :param to_address: Recipient of a consecutive transfer, or None for no transfer
    :type to_address: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :return: Transaction hash of the convert transaction
    :rtype: str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    # route follow-up tasks to the same queue this task was delivered on
    queue = self.request.delivery_info['routing_key']
    c = RpcClient(chain_spec, holder_address=from_address)
    cr = CICRegistry.get_contract(chain_spec, 'BancorNetwork')
    source_token = CICRegistry.get_address(chain_spec, tokens[0]['address'])
    # the common (BNT) reserve token mediates the conversion path
    reserve_address = CICRegistry.get_contract(chain_spec, 'BNTToken', 'ERC20').address()
    # reset any existing allowance to zero before approving the new amount
    tx_factory = TokenTxFactory(from_address, c)
    tx_approve_zero = tx_factory.approve(source_token.address(), cr.address(), 0, chain_spec)
    (tx_approve_zero_hash_hex, tx_approve_zero_signed_hex) = sign_and_register_tx(tx_approve_zero, chain_str, queue, 'cic_eth.eth.token.otx_cache_approve')
    tx_approve = tx_factory.approve(source_token.address(), cr.address(), source_amount, chain_spec)
    (tx_approve_hash_hex, tx_approve_signed_hex) = sign_and_register_tx(tx_approve, chain_str, queue, 'cic_eth.eth.token.otx_cache_approve')
    tx_factory = BancorTxFactory(from_address, c)
    tx_convert = tx_factory.convert(
        tokens[0]['address'],
        tokens[1]['address'],
        reserve_address,
        source_amount,
        minimum_return,
        chain_spec,
    )
    (tx_convert_hash_hex, tx_convert_signed_hex) = sign_and_register_tx(tx_convert, chain_str, queue, 'cic_eth.eth.bancor.otx_cache_convert')
    # TODO: consider moving save recipient to async task / chain it before the tx send
    if to_address != None:
        save_convert_recipient(tx_convert_hash_hex, to_address, chain_str)
    # NOTE(review): three signed txs are submitted but only the approve hash is
    # passed in tx_hashes_hex, and the gas budget covers only the zero-approval
    # tx -- confirm this is intentional
    s = create_check_gas_and_send_task(
        [tx_approve_zero_signed_hex, tx_approve_signed_hex, tx_convert_signed_hex],
        chain_str,
        from_address,
        tx_approve_zero['gasPrice'] * tx_approve_zero['gas'],
        tx_hashes_hex=[tx_approve_hash_hex],
        queue=queue,
    )
    s.apply_async()
    return tx_convert_hash_hex
#@celery_app.task()
#def process_approval(tx_hash_hex):
# t = session.query(TxConvertTransfer).query(TxConvertTransfer.approve_tx_hash==tx_hash_hex).first()
# c = session.query(Otx).query(Otx.tx_hash==t.convert_tx_hash)
# gas_limit = 8000000
# gas_price = GasOracle.gas_price()
#
# # TODO: use celery group instead
# s_queue = celery.signature(
# 'cic_eth.queue.tx.create',
# [
# nonce,
# c['address'], # TODO: check that this is in fact sender address
# c['tx_hash'],
# c['signed_tx'],
# ]
# )
# s_queue.apply_async()
#
# s_check_gas = celery.signature(
# 'cic_eth.eth.gas.check_gas',
# [
# c['address'],
# [c['signed_tx']],
# gas_limit * gas_price,
# ]
# )
# s_send = celery.signature(
# 'cic_eth.eth.tx.send',
# [],
# )
#
# s_set_sent = celery.signature(
# 'cic_eth.queue.state.set_sent',
# [False],
# )
# s_send.link(s_set_sent)
# s_check_gas.link(s_send)
# s_check_gas.apply_async()
# return tx_hash_hex
@celery_app.task()
def save_convert_recipient(convert_hash, recipient_address, chain_str):
    """Store the transfer recipient for a convert-and-transfer operation.

    :param convert_hash: Transaction hash of the convert operation
    :type convert_hash: str, 0x-hex
    :param recipient_address: Address of the subsequent transfer recipient
    :type recipient_address: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    """
    record = TxConvertTransfer(convert_hash, recipient_address, chain_str)
    db_session = SessionBase.create_session()
    db_session.add(record)
    db_session.commit()
    db_session.close()
@celery_app.task()
def save_convert_transfer(convert_hash, transfer_hash):
    """Record that the transfer leg of a convert-and-transfer has executed.

    :param convert_hash: Transaction hash of the convert operation
    :type convert_hash: str, 0x-hex
    :param transfer_hash: Transaction hash of the transfer operation
    :type transfer_hash: str, 0x-hex
    :returns: transfer_hash,
    :rtype: list, single str, 0x-hex
    """
    db_session = SessionBase.create_session()
    record = TxConvertTransfer.get(convert_hash)
    record.transfer(transfer_hash)
    db_session.add(record)
    db_session.commit()
    db_session.close()
    return [transfer_hash]
# TODO: seems unused, consider removing
@celery_app.task()
def resolve_converters_by_tokens(tokens, chain_str):
    """Return converters for a list of tokens.

    Each token entry is annotated in place with a 'converters' list as
    reported by the ConverterRegistry contract.

    :param tokens: Token entries (dicts with an 'address' key) to look up
    :type tokens: list of dict
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :return: The same token entries, each annotated with converter addresses
    :rtype: list of dict
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    # the registry contract is the same for every token; resolve it once
    # instead of once per loop iteration
    c = CICRegistry.get_contract(chain_spec, 'ConverterRegistry')
    fn = c.function('getConvertersByAnchors')
    for t in tokens:
        # the original wrapped this call in `except Exception as e: raise e`,
        # which is a no-op; errors now propagate directly
        t['converters'] = fn([t['address']]).call()
    return tokens
@celery_app.task(bind=True)
def transfer_converted(self, tokens, holder_address, receiver_address, value, tx_convert_hash_hex, chain_str):
    """Execute the ERC20 transfer of a convert-and-transfer operation.

    First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.

    :param tokens: Token addresses
    :type tokens: list of str, 0x-hex
    :param holder_address: Token holder address
    :type holder_address: str, 0x-hex
    :param receiver_address: Token receiver address
    :type receiver_address: str, 0x-hex
    :param value: Amount of token, in 'wei'
    :type value: int
    :param tx_convert_hash_hex: Transaction hash of the preceding convert operation
    :type tx_convert_hash_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :raises TokenCountError: Either none or more than one tokens have been passed as tokens argument
    :return: Transaction hash
    :rtype: str, 0x-hex
    """
    # we only allow one token, one transfer
    if len(tokens) != 1:
        raise TokenCountError
    chain_spec = ChainSpec.from_chain_str(chain_str)
    # route follow-up tasks to the same queue this task was delivered on
    queue = self.request.delivery_info['routing_key']
    c = RpcClient(chain_spec, holder_address=holder_address)
    # get transaction parameters
    # NOTE(review): gas_price is fetched but never used below -- confirm
    # whether it should feed the transfer transaction
    gas_price = c.gas_price()
    tx_factory = TokenTxFactory(holder_address, c)
    token_address = tokens[0]['address']
    tx_transfer = tx_factory.transfer(
        token_address,
        receiver_address,
        value,
        chain_spec,
    )
    (tx_transfer_hash_hex, tx_transfer_signed_hex) = sign_and_register_tx(tx_transfer, chain_str, queue, 'cic_eth.eth.token.otx_cache_transfer')
    # send transaction
    logg.info('transfer converted token {} from {} to {} value {} {}'.format(token_address, holder_address, receiver_address, value, tx_transfer_signed_hex))
    s = create_check_gas_and_send_task(
        [tx_transfer_signed_hex],
        chain_str,
        holder_address,
        tx_transfer['gasPrice'] * tx_transfer['gas'],
        None,
        queue,
    )
    # record the convert->transfer linkage first, then dispatch the send
    s_save = celery.signature(
        'cic_eth.eth.bancor.save_convert_transfer',
        [
            tx_convert_hash_hex,
            tx_transfer_hash_hex,
        ],
        queue=queue,
    )
    s_save.link(s)
    s_save.apply_async()
    return tx_transfer_hash_hex
@celery_app.task()
def otx_cache_convert(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
        ):
    """Decode a signed convert transaction and store its metadata in the tx cache.

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Signed raw transaction data
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :return: Transaction hash
    :rtype: str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    # NOTE(review): `unpack` is not among this module's visible imports
    # (unpack_signed_raw_tx is) -- verify this name resolves
    tx = unpack(tx_signed_raw_bytes, chain_spec)
    tx_data = unpack_convert(tx['data'])
    logg.debug('tx data {}'.format(tx_data))
    session = TxCache.create_session()
    # sender is recorded as both sender and recipient, and the source amount
    # is mirrored as the destination amount -- presumably a placeholder until
    # the actual conversion result is known; confirm against consumers
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx['from'],
        tx_data['source_token'],
        tx_data['destination_token'],
        tx_data['amount'],
        tx_data['amount'],
    )
    session.add(tx_cache)
    session.commit()
    session.close()
    return tx_hash_hex

View File

@@ -13,9 +13,9 @@ from chainlib.eth.tx import (
from cic_eth_registry import CICRegistry
from cic_eth_registry.erc20 import ERC20Token
from hexathon import strip_0x
from chainqueue.db.models.tx import TxCache
from chainqueue.error import NotLocalTxError
from eth_erc20 import ERC20
from chainqueue.sql.tx import cache_tx_dict
# local imports
from cic_eth.db.models.base import SessionBase
@@ -375,16 +375,19 @@ def cache_transfer_data(
token_value = tx_data[1]
session = SessionBase.create_session()
tx_dict = {
'hash': tx_hash_hex,
'from': tx['from'],
'to': recipient_address,
'source_token': tx['to'],
'destination_token': tx['to'],
'from_value': token_value,
'to_value': token_value,
}
(tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
tx_cache = TxCache(
tx_hash_hex,
tx['from'],
recipient_address,
tx['to'],
tx['to'],
token_value,
token_value,
session=session,
)
session.add(tx_cache)
session.commit()
cache_id = tx_cache.id
session.close()
return (tx_hash_hex, cache_id)
@@ -414,16 +417,19 @@ def cache_transfer_from_data(
token_value = tx_data[2]
session = SessionBase.create_session()
tx_dict = {
'hash': tx_hash_hex,
'from': tx['from'],
'to': recipient_address,
'source_token': tx['to'],
'destination_token': tx['to'],
'from_value': token_value,
'to_value': token_value,
}
(tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
tx_cache = TxCache(
tx_hash_hex,
tx['from'],
recipient_address,
tx['to'],
tx['to'],
token_value,
token_value,
session=session,
)
session.add(tx_cache)
session.commit()
cache_id = tx_cache.id
session.close()
return (tx_hash_hex, cache_id)
@@ -452,16 +458,19 @@ def cache_approve_data(
token_value = tx_data[1]
session = SessionBase.create_session()
tx_dict = {
'hash': tx_hash_hex,
'from': tx['from'],
'to': recipient_address,
'source_token': tx['to'],
'destination_token': tx['to'],
'from_value': token_value,
'to_value': token_value,
}
(tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
tx_cache = TxCache(
tx_hash_hex,
tx['from'],
recipient_address,
tx['to'],
tx['to'],
token_value,
token_value,
session=session,
)
session.add(tx_cache)
session.commit()
cache_id = tx_cache.id
session.close()
return (tx_hash_hex, cache_id)

View File

@@ -9,7 +9,6 @@ from chainlib.chain import ChainSpec
from chainlib.eth.address import is_checksum_address
from chainlib.connection import RPCConnection
from chainqueue.db.enum import StatusBits
from chainqueue.sql.tx import cache_tx_dict
from chainlib.eth.gas import (
balance,
price,
@@ -134,17 +133,20 @@ def cache_gas_data(
session = SessionBase.create_session()
tx_dict = {
'hash': tx_hash_hex,
'from': tx['from'],
'to': tx['to'],
'source_token': ZERO_ADDRESS,
'destination_token': ZERO_ADDRESS,
'from_value': tx['value'],
'to_value': tx['value'],
}
tx_cache = TxCache(
tx_hash_hex,
tx['from'],
tx['to'],
ZERO_ADDRESS,
ZERO_ADDRESS,
tx['value'],
tx['value'],
session=session,
)
(tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
session.add(tx_cache)
session.commit()
cache_id = tx_cache.id
session.close()
return (tx_hash_hex, cache_id)

View File

@@ -18,6 +18,7 @@ from hexathon import (
strip_0x,
)
from chainqueue.db.models.tx import Otx
from chainqueue.db.models.tx import TxCache
from chainqueue.db.enum import StatusBits
from chainqueue.error import NotLocalTxError
from potaahto.symbols import snake_and_camel

View File

@@ -14,11 +14,9 @@ from chainlib.eth.tx import (
)
from chainlib.eth.block import block_by_number
from chainlib.eth.contract import abi_decode_single
from chainlib.eth.constant import ZERO_ADDRESS
from hexathon import strip_0x
from cic_eth_registry import CICRegistry
from cic_eth_registry.erc20 import ERC20Token
from cic_eth_registry.error import UnknownContractError
from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusEnum
from chainqueue.sql.query import get_tx_cache
@@ -116,6 +114,9 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
# TODO: pass through registry to validate declarator entry of token
#token = registry.by_address(tx['to'], sender_address=self.call_address)
token = ERC20Token(chain_spec, rpc, tx['to'])
token_symbol = token.symbol
token_decimals = token.decimals
times = tx_times(tx['hash'], chain_spec)
tx_r = {
'hash': tx['hash'],
@@ -125,6 +126,12 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
'destination_value': tx_token_value,
'source_token': tx['to'],
'destination_token': tx['to'],
'source_token_symbol': token_symbol,
'destination_token_symbol': token_symbol,
'source_token_decimals': token_decimals,
'destination_token_decimals': token_decimals,
'source_token_chain': chain_str,
'destination_token_chain': chain_str,
'nonce': tx['nonce'],
}
if times['queue'] != None:
@@ -139,8 +146,8 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
# TODO: Surely it must be possible to optimize this
# TODO: DRY this with callback filter in cic_eth/runnable/manager
# TODO: Remove redundant fields from end representation (timestamp, tx_hash)
@celery_app.task(bind=True, base=BaseTask)
def tx_collate(self, tx_batches, chain_spec_dict, offset, limit, newest_first=True, verify_contracts=True):
@celery_app.task()
def tx_collate(tx_batches, chain_spec_dict, offset, limit, newest_first=True):
"""Merges transaction data from multiple sources and sorts them in chronological order.
:param tx_batches: Transaction data inputs
@@ -189,32 +196,6 @@ def tx_collate(self, tx_batches, chain_spec_dict, offset, limit, newest_first=Tr
if newest_first:
ks.reverse()
for k in ks:
tx = txs_by_block[k]
if verify_contracts:
try:
tx = verify_and_expand(tx, chain_spec, sender_address=BaseTask.call_address)
except UnknownContractError:
logg.error('verify failed on tx {}, skipping'.format(tx['hash']))
continue
txs.append(tx)
txs.append(txs_by_block[k])
return txs
def verify_and_expand(tx, chain_spec, sender_address=ZERO_ADDRESS):
rpc = RPCConnection.connect(chain_spec, 'default')
registry = CICRegistry(chain_spec, rpc)
if tx.get('source_token_symbol') == None and tx['source_token'] != ZERO_ADDRESS:
r = registry.by_address(tx['source_token'], sender_address=sender_address)
token = ERC20Token(chain_spec, rpc, tx['source_token'])
tx['source_token_symbol'] = token.symbol
tx['source_token_decimals'] = token.decimals
if tx.get('destination_token_symbol') == None and tx['destination_token'] != ZERO_ADDRESS:
r = registry.by_address(tx['destination_token'], sender_address=sender_address)
token = ERC20Token(chain_spec, rpc, tx['destination_token'])
tx['destination_token_symbol'] = token.symbol
tx['destination_token_decimals'] = token.decimals
return tx

View File

@@ -27,7 +27,7 @@ def database_engine(
SessionBase.poolable = False
dsn = dsn_from_config(load_config)
#SessionBase.connect(dsn, True)
SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
SessionBase.connect(dsn, debug=load_config.get('DATABASE_DEBUG') != None)
return dsn

View File

@@ -100,7 +100,6 @@ def get_upcoming_tx(chain_spec, status=StatusEnum.READYSEND, not_status=None, re
q_outer = q_outer.join(Lock, isouter=True)
q_outer = q_outer.filter(or_(Lock.flags==None, Lock.flags.op('&')(LockEnum.SEND.value)==0))
if not is_alive(status):
SessionBase.release_session(session)
raise ValueError('not a valid non-final tx value: {}'.format(status))

View File

@@ -80,12 +80,7 @@ def main():
t = api.create_account(register=register)
ps.get_message()
try:
o = ps.get_message(timeout=args.timeout)
except TimeoutError as e:
sys.stderr.write('got no new address from cic-eth before timeout: {}\n'.format(e))
sys.exit(1)
ps.unsubscribe()
o = ps.get_message(timeout=args.timeout)
m = json.loads(o['data'])
print(m['result'])

View File

@@ -90,7 +90,6 @@ class DispatchSyncer:
def __init__(self, chain_spec):
self.chain_spec = chain_spec
self.session = None
def chain(self):
@@ -101,18 +100,16 @@ class DispatchSyncer:
c = len(txs.keys())
logg.debug('processing {} txs {}'.format(c, list(txs.keys())))
chain_str = str(self.chain_spec)
self.session = SessionBase.create_session()
session = SessionBase.create_session()
for k in txs.keys():
tx_raw = txs[k]
tx_raw_bytes = bytes.fromhex(strip_0x(tx_raw))
tx = unpack(tx_raw_bytes, self.chain_spec)
try:
set_reserved(self.chain_spec, tx['hash'], session=self.session)
self.session.commit()
set_reserved(self.chain_spec, tx['hash'], session=session)
except NotLocalTxError as e:
logg.warning('dispatcher was triggered with non-local tx {}'.format(tx['hash']))
self.session.rollback()
continue
s_check = celery.signature(
@@ -135,25 +132,16 @@ class DispatchSyncer:
s_check.link(s_send)
t = s_check.apply_async()
logg.info('processed {}'.format(k))
self.session.close()
self.session = None
def loop(self, interval):
def loop(self, w3, interval):
while run:
txs = {}
typ = StatusBits.QUEUED
utxs = get_upcoming_tx(self.chain_spec, typ)
for k in utxs.keys():
txs[k] = utxs[k]
try:
conn = RPCConnection.connect(self.chain_spec, 'default')
self.process(conn, txs)
except ConnectionError as e:
if self.session != None:
self.session.close()
self.session = None
logg.error('connection to node failed: {}'.format(e))
self.process(w3, txs)
if len(utxs) > 0:
time.sleep(self.yield_delay)
@@ -163,7 +151,8 @@ class DispatchSyncer:
def main():
syncer = DispatchSyncer(chain_spec)
syncer.loop(float(config.get('DISPATCHER_LOOP_INTERVAL')))
conn = RPCConnection.connect(chain_spec, 'default')
syncer.loop(conn, float(config.get('DISPATCHER_LOOP_INTERVAL')))
sys.exit(0)

View File

@@ -11,7 +11,6 @@ from chainqueue.db.enum import StatusBits
from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx
from chainqueue.sql.query import get_paused_tx_cache as get_paused_tx
from chainlib.eth.address import to_checksum_address
# local imports
from cic_eth.db.models.base import SessionBase
@@ -48,13 +47,12 @@ class GasFilter(SyncFilter):
SessionBase.release_session(session)
address = to_checksum_address(r[0])
logg.info('resuming gas-in-waiting txs for {}'.format(r[0]))
if len(txs) > 0:
s = create_check_gas_task(
list(txs.values()),
self.chain_spec,
address,
r[0],
0,
tx_hashes_hex=list(txs.keys()),
queue=self.queue,

View File

@@ -12,24 +12,20 @@ from hexathon import (
# local imports
from .base import SyncFilter
logg = logging.getLogger(__name__)
logg = logging.getLogger().getChild(__name__)
account_registry_add_log_hash = '0x9cc987676e7d63379f176ea50df0ae8d2d9d1141d1231d4ce15b5965f73c9430'
class RegistrationFilter(SyncFilter):
def __init__(self, chain_spec, contract_address, queue=None):
def __init__(self, chain_spec, queue):
self.chain_spec = chain_spec
self.queue = queue
self.contract_address = contract_address
def filter(self, conn, block, tx, db_session=None):
if self.contract_address != tx.inputs[0]:
logg.debug('not an account registry tx; {} != {}'.format(self.contract_address, tx.inputs[0]))
return None
registered_address = None
for l in tx.logs:
event_topic_hex = l['topics'][0]
if event_topic_hex == account_registry_add_log_hash:

View File

@@ -78,14 +78,6 @@ chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
rpc = RPCConnection.connect(chain_spec, 'default')
registry = None
try:
registry = connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
except UnknownContractError as e:
logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
sys.exit(1)
logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))
def main():
@@ -93,6 +85,7 @@ def main():
celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
# Connect to blockchain with chainlib
rpc = RPCConnection.connect(chain_spec, 'default')
o = block_latest()
r = rpc.do(o)
@@ -158,8 +151,7 @@ def main():
tx_filter = TxFilter(chain_spec, config.get('_CELERY_QUEUE'))
account_registry_address = registry.by_name('AccountRegistry')
registration_filter = RegistrationFilter(chain_spec, account_registry_address, queue=config.get('_CELERY_QUEUE'))
registration_filter = RegistrationFilter(chain_spec, config.get('_CELERY_QUEUE'))
gas_filter = GasFilter(chain_spec, config.get('_CELERY_QUEUE'))

View File

@@ -9,8 +9,8 @@ import semver
version = (
0,
12,
2,
'alpha.3',
0,
'alpha.2',
)
version_object = semver.VersionInfo(

View File

@@ -6,4 +6,4 @@ HOST=localhost
PORT=5432
ENGINE=sqlite
DRIVER=pysqlite
DEBUG=0
DEBUG=

View File

@@ -13,17 +13,11 @@ ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages
# --force-reinstall \
# --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
# -r requirements.txt
COPY *requirements.txt ./
COPY . .
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY \
--extra-index-url $EXTRA_INDEX_URL \
-r requirements.txt \
-r services_requirements.txt \
-r admin_requirements.txt
COPY . .
RUN python setup.py install
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL .
ENV PYTHONPATH .
@@ -41,31 +35,30 @@ COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
#COPY util/liveness/health.sh /usr/local/bin/health.sh
ENTRYPOINT []
## ------------------ PRODUCTION CONTAINER ----------------------
#FROM python:3.8.6-slim-buster as prod
#
#RUN apt-get update && \
# apt install -y gnupg libpq-dev procps
#
#WORKDIR /root
#
#COPY --from=dev /usr/local/bin/ /usr/local/bin/
#COPY --from=dev /usr/local/lib/python3.8/site-packages/ \
# /usr/local/lib/python3.8/site-packages/
#
#COPY docker/entrypoints/* ./
#RUN chmod 755 *.sh
#
## # ini files in config directory defines the configurable parameters for the application
## # they can all be overridden by environment variables
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/
#COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
#COPY scripts/ scripts/
#
## TODO this kind of code sharing across projects should be discouraged...can we make util a library?
##COPY util/liveness/health.sh /usr/local/bin/health.sh
#
#ENTRYPOINT []
# --- RUNTIME ---
FROM python:3.8.6-slim-buster as runtime
RUN apt-get update && \
apt install -y gnupg libpq-dev procps
WORKDIR /usr/src/cic-eth
COPY --from=dev /usr/local/bin/ /usr/local/bin/
COPY --from=dev /usr/local/lib/python3.8/site-packages/ \
/usr/local/lib/python3.8/site-packages/
COPY docker/entrypoints/* ./
RUN chmod 755 *.sh
# # ini files in config directory defines the configurable parameters for the application
# # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
# TODO this kind of code sharing across projects should be discouraged...can we make util a library?
#COPY util/liveness/health.sh /usr/local/bin/health.sh
ENTRYPOINT []

View File

@@ -14,16 +14,10 @@ ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages
# --force-reinstall \
# --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
# -r requirements.txt
COPY *requirements.txt .
RUN pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY \
--extra-index-url $EXTRA_INDEX_URL \
-r requirements.txt \
-r services_requirements.txt \
-r admin_requirements.txt
COPY . .
RUN python setup.py install
RUN pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL .
COPY docker/entrypoints/* ./
RUN chmod 755 *.sh
@@ -39,31 +33,30 @@ COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
#COPY util/liveness/health.sh /usr/local/bin/health.sh
ENTRYPOINT []
# ------------------ PRODUCTION CONTAINER ----------------------
#FROM python:3.8.6-slim-buster as prod
#
#RUN apt-get update && \
# apt install -y gnupg libpq-dev procps
#
#WORKDIR /root
#
#COPY --from=dev /usr/local/bin/ /usr/local/bin/
#COPY --from=dev /usr/local/lib/python3.8/site-packages/ \
# /usr/local/lib/python3.8/site-packages/
#
#COPY docker/entrypoints/* ./
#RUN chmod 755 *.sh
#
## # ini files in config directory defines the configurable parameters for the application
## # they can all be overridden by environment variables
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/
#COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
#COPY scripts/ scripts/
#
## TODO this kind of code sharing across projects should be discouraged...can we make util a library?
##COPY util/liveness/health.sh /usr/local/bin/health.sh
#
#ENTRYPOINT []
#
# --- RUNTIME ---
FROM python:3.8.6-slim-buster as runtime
RUN apt-get update && \
apt install -y gnupg libpq-dev procps
WORKDIR /usr/src/cic-eth
COPY --from=dev /usr/local/bin/ /usr/local/bin/
COPY --from=dev /usr/local/lib/python3.8/site-packages/ \
/usr/local/lib/python3.8/site-packages/
COPY docker/entrypoints/* ./
RUN chmod 755 *.sh
# # ini files in config directory defines the configurable parameters for the application
# # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
# TODO this kind of code sharing across projects should be discouraged...can we make util a library?
#COPY util/liveness/health.sh /usr/local/bin/health.sh
ENTRYPOINT []

View File

@@ -1,3 +1,3 @@
celery==4.4.7
chainlib-eth>=0.0.6a1,<0.1.0
chainlib~=0.0.5a1
semver==2.13.0

View File

@@ -1,15 +1,15 @@
chainqueue>=0.0.3a2,<0.1.0
chainsyncer[sql]>=0.0.5a1,<0.1.0
chainsyncer[sql]~=0.0.3a3
chainqueue~=0.0.2b5
alembic==1.4.2
confini>=0.3.6rc4,<0.5.0
confini~=0.3.6rc4
redis==3.5.3
hexathon~=0.0.1a7
pycryptodome==3.10.1
liveness~=0.0.1a7
eth-address-index>=0.1.3a1,<0.2.0
eth-accounts-index>=0.0.13a1,<0.1.0
cic-eth-registry>=0.5.7a1,<0.6.0
erc20-faucet>=0.2.3a1,<0.3.0
erc20-transfer-authorization>=0.3.3a1,<0.4.0
sarafu-faucet>=0.0.4a5,<0.1.0
eth-address-index~=0.1.2a1
eth-accounts-index~=0.0.12a1
cic-eth-registry~=0.5.6a1
erc20-faucet~=0.2.2a1
erc20-transfer-authorization~=0.3.2a1
sarafu-faucet~=0.0.4a1
moolb~=0.1.1b2

View File

@@ -6,4 +6,4 @@ pytest-redis==2.0.0
redis==3.5.3
eth-tester==0.5.0b3
py-evm==0.3.0a20
eth-erc20~=0.0.11a1
eth-erc20~=0.0.10a2

View File

@@ -18,7 +18,6 @@ def test_filter_bogus(
cic_registry,
contract_roles,
register_lookups,
account_registry,
):
fltrs = [
@@ -27,7 +26,7 @@ def test_filter_bogus(
TxFilter(default_chain_spec, None),
CallbackFilter(default_chain_spec, None, None, caller_address=contract_roles['CONTRACT_DEPLOYER']),
StragglerFilter(default_chain_spec, None),
RegistrationFilter(default_chain_spec, account_registry, queue=None),
RegistrationFilter(default_chain_spec, queue=None),
]
for fltr in fltrs:

View File

@@ -1,7 +1,3 @@
# standard imports
import logging
import os
# external imports
from eth_accounts_index.registry import AccountRegistry
from chainlib.connection import RPCConnection
@@ -18,17 +14,12 @@ from chainlib.eth.block import (
Block,
)
from erc20_faucet import Faucet
from hexathon import (
strip_0x,
add_0x,
)
from hexathon import strip_0x
from chainqueue.sql.query import get_account_tx
# local imports
from cic_eth.runnable.daemons.filters.register import RegistrationFilter
logg = logging.getLogger()
def test_register_filter(
default_chain_spec,
@@ -69,11 +60,7 @@ def test_register_filter(
tx = Tx(tx_src, block=block, rcpt=rcpt)
tx.apply_receipt(rcpt)
fltr = RegistrationFilter(default_chain_spec, add_0x(os.urandom(20).hex()), queue=None)
t = fltr.filter(eth_rpc, block, tx, db_session=init_database)
assert t == None
fltr = RegistrationFilter(default_chain_spec, account_registry, queue=None)
fltr = RegistrationFilter(default_chain_spec, queue=None)
t = fltr.filter(eth_rpc, block, tx, db_session=init_database)
t.get_leaf()

View File

@@ -290,7 +290,6 @@ def test_fix_nonce(
txs = get_nonce_tx_cache(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
ks = txs.keys()
assert len(ks) == 2
for k in ks:
hsh = add_0x(k)
otx = Otx.load(hsh, session=init_database)

View File

@@ -184,7 +184,7 @@ def test_admin_api_account(
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])
r = api.account(default_chain_spec, agent_roles['ALICE'], include_sender=True, include_recipient=True)
r = api.account(default_chain_spec, agent_roles['ALICE'])
assert len(r) == 5
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])

View File

@@ -1,92 +0,0 @@
# external imports
import celery
import pytest
from chainlib.connection import RPCConnection
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.eth.gas import (
RPCGasOracle,
)
from chainlib.eth.tx import (
TxFormat,
unpack,
)
from chainlib.eth.nonce import RPCNonceOracle
from eth_erc20 import ERC20
from hexathon import (
add_0x,
strip_0x,
)
from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx
def test_ext_tx_collate(
default_chain_spec,
init_database,
eth_rpc,
eth_signer,
custodial_roles,
agent_roles,
foo_token,
bar_token,
register_tokens,
cic_registry,
register_lookups,
init_celery_tasks,
celery_session_worker,
):
rpc = RPCConnection.connect(default_chain_spec, 'default')
nonce_oracle = RPCNonceOracle(custodial_roles['FOO_TOKEN_GIFTER'], eth_rpc)
gas_oracle = RPCGasOracle(eth_rpc)
c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
transfer_value_foo = 1000
transfer_value_bar = 1024
(tx_hash_hex, tx_signed_raw_hex) = c.transfer(foo_token, custodial_roles['FOO_TOKEN_GIFTER'], agent_roles['ALICE'], transfer_value_foo, tx_format=TxFormat.RLP_SIGNED)
tx = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), default_chain_spec)
otx = Otx(
tx['nonce'],
tx_hash_hex,
tx_signed_raw_hex,
)
init_database.add(otx)
init_database.commit()
txc = TxCache(
tx_hash_hex,
tx['from'],
tx['to'],
foo_token,
bar_token,
transfer_value_foo,
transfer_value_bar,
666,
13,
session=init_database,
)
init_database.add(txc)
init_database.commit()
s = celery.signature(
'cic_eth.ext.tx.tx_collate',
[
{tx_hash_hex: tx_signed_raw_hex},
default_chain_spec.asdict(),
0,
100,
],
queue=None,
)
t = s.apply_async()
r = t.get_leaf()
assert t.successful()
assert len(r) == 1
tx = r[0]
assert tx['source_token_symbol'] == 'FOO'
assert tx['source_token_decimals'] == 6
assert tx['destination_token_symbol'] == 'BAR'
assert tx['destination_token_decimals'] == 9

View File

@@ -1,7 +1,6 @@
# extended imports
# third-party imports
import pytest
import uuid
import unittest
# local imports
from cic_eth.db.models.nonce import (
@@ -56,7 +55,7 @@ def test_nonce_reserve(
o = q.first()
assert o.nonce == 43
nonce = NonceReservation.release(eth_empty_accounts[0], str(uu), session=init_database)
nonce = NonceReservation.release(eth_empty_accounts[0], str(uu))
init_database.commit()
assert nonce == (str(uu), 42)

View File

@@ -1,5 +1,4 @@
# external imports
import celery
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import (
receipt,
@@ -21,7 +20,6 @@ def test_translate(
cic_registry,
init_celery_tasks,
register_lookups,
celery_session_worker,
):
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc)
@@ -48,20 +46,6 @@ def test_translate(
'recipient': agent_roles['BOB'],
'recipient_label': None,
}
#tx = translate_tx_addresses(tx, [contract_roles['CONTRACT_DEPLOYER']], default_chain_spec.asdict())
s = celery.signature(
'cic_eth.ext.address.translate_tx_addresses',
[
tx,
[contract_roles['CONTRACT_DEPLOYER']],
default_chain_spec.asdict(),
],
queue=None,
)
t = s.apply_async()
r = t.get_leaf()
assert t.successful()
assert r['sender_label'] == 'alice'
assert r['recipient_label'] == 'bob'
tx = translate_tx_addresses(tx, [contract_roles['CONTRACT_DEPLOYER']], default_chain_spec.asdict())
assert tx['sender_label'] == 'alice'
assert tx['recipient_label'] == 'bob'

View File

@@ -9,11 +9,6 @@ from chainlib.eth.gas import (
Gas,
)
from chainlib.chain import ChainSpec
from hexathon import (
add_0x,
strip_0x,
uniform as hex_uniform,
)
# local imports
from cic_eth.db.enum import LockEnum
@@ -39,10 +34,7 @@ def test_upcoming_with_lock(
gas_oracle = RPCGasOracle(eth_rpc)
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
alice_normal = add_0x(hex_uniform(strip_0x(agent_roles['ALICE'])))
bob_normal = add_0x(hex_uniform(strip_0x(agent_roles['BOB'])))
(tx_hash_hex, tx_rpc) = c.create(alice_normal, bob_normal, 100 * (10 ** 6))
(tx_hash_hex, tx_rpc) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6))
tx_signed_raw_hex = tx_rpc['params'][0]
register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database)
@@ -51,12 +43,12 @@ def test_upcoming_with_lock(
txs = get_upcoming_tx(default_chain_spec, StatusEnum.PENDING)
assert len(txs.keys()) == 1
Lock.set(str(default_chain_spec), LockEnum.SEND, address=alice_normal)
Lock.set(str(default_chain_spec), LockEnum.SEND, address=agent_roles['ALICE'])
txs = get_upcoming_tx(default_chain_spec, status=StatusEnum.PENDING)
txs = get_upcoming_tx(default_chain_spec, StatusEnum.PENDING)
assert len(txs.keys()) == 0
(tx_hash_hex, tx_rpc) = c.create(bob_normal, alice_normal, 100 * (10 ** 6))
(tx_hash_hex, tx_rpc) = c.create(agent_roles['BOB'], agent_roles['ALICE'], 100 * (10 ** 6))
tx_signed_raw_hex = tx_rpc['params'][0]
register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database)

View File

@@ -1,7 +1,7 @@
crypto-dev-signer>=0.4.14b7,<=0.4.14
chainqueue~=0.0.2b6
confini>=0.3.6rc4,<0.5.0
cic-eth-registry>=0.5.7a1,<0.6.0
crypto-dev-signer~=0.4.14b6
chainqueue~=0.0.2b5
confini~=0.3.6rc4
cic-eth-registry~=0.5.6a1
redis==3.5.3
hexathon~=0.0.1a7
pycryptodome==3.10.1

View File

@@ -1,5 +1,4 @@
.git
.cache
.dot
**/doc
**/node_modules
**/doc

View File

@@ -2,8 +2,8 @@
.cic_meta_variables:
variables:
APP_NAME: cic-meta
DOCKERFILE_PATH: docker/Dockerfile_ci
CONTEXT: apps/$APP_NAME
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
CONTEXT: apps
build-mr-cic-meta:
extends:
@@ -21,7 +21,7 @@ test-mr-cic-meta:
stage: test
image: $MR_IMAGE_TAG
script:
- cd /root
- cd /tmp/src/cic-meta
- npm install --dev
- npm run test
- npm run test:coverage

View File

@@ -1,33 +1,31 @@
# syntax = docker/dockerfile:1.2
#FROM node:15.3.0-alpine3.10
FROM node:lts-alpine3.14
FROM node:15.3.0-alpine3.10
WORKDIR /root
WORKDIR /tmp/src/cic-meta
RUN apk add --no-cache postgresql bash
# required to build the cic-client-meta module
COPY cic-meta/src/ src/
COPY cic-meta/scripts/ scripts/
# copy the dependencies
COPY package.json package-lock.json .
RUN --mount=type=cache,mode=0755,target=/root/.npm \
npm set cache /root/.npm && \
npm ci
COPY cic-meta/package.json .
COPY cic-meta/tsconfig.json .
COPY cic-meta/webpack.config.js .
RUN npm install
COPY webpack.config.js .
COPY tsconfig.json .
## required to build the cic-client-meta module
COPY src/ src/
COPY scripts/ scripts/
COPY tests/ tests/
COPY tests/*.asc /root/pgp/
COPY cic-meta/tests/ tests/
COPY cic-meta/tests/*.asc /root/pgp/
## copy runtime configs
COPY .config/ /usr/local/etc/cic-meta/
#
## db migrations
COPY docker/db.sh ./db.sh
# copy runtime configs
COPY cic-meta/.config/ /usr/local/etc/cic-meta/
# db migrations
COPY cic-meta/docker/db.sh ./db.sh
RUN chmod 755 ./db.sh
#
RUN alias tsc=node_modules/typescript/bin/tsc
COPY docker/start_server.sh ./start_server.sh
COPY cic-meta/docker/start_server.sh ./start_server.sh
RUN chmod 755 ./start_server.sh
ENTRYPOINT ["sh", "./start_server.sh"]

View File

@@ -1,32 +0,0 @@
# syntax = docker/dockerfile:1.2
#FROM node:15.3.0-alpine3.10
FROM node:lts-alpine3.14
WORKDIR /root
RUN apk add --no-cache postgresql bash
# copy the dependencies
COPY package.json package-lock.json .
RUN npm set cache /root/.npm && \
npm ci
COPY webpack.config.js .
COPY tsconfig.json .
## required to build the cic-client-meta module
COPY src/ src/
COPY scripts/ scripts/
COPY tests/ tests/
COPY tests/*.asc /root/pgp/
## copy runtime configs
COPY .config/ /usr/local/etc/cic-meta/
#
## db migrations
COPY docker/db.sh ./db.sh
RUN chmod 755 ./db.sh
#
RUN alias tsc=node_modules/typescript/bin/tsc
COPY docker/start_server.sh ./start_server.sh
RUN chmod 755 ./start_server.sh
ENTRYPOINT ["sh", "./start_server.sh"]

File diff suppressed because it is too large Load Diff

View File

@@ -36,7 +36,7 @@ test-mr-cic-notify:
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes:
- apps/$APP_NAME/**/*
- apps/cic-eth/**/*
when: always
build-push-cic-notify:

View File

@@ -34,8 +34,6 @@ elif args.v:
config = confini.Config(args.c, args.env_prefix)
config.process()
config.add(args.q, '_CELERY_QUEUE', True)
config.censor('API_KEY', 'AFRICASTALKING')
config.censor('API_USERNAME', 'AFRICASTALKING')
config.censor('PASSWORD', 'DATABASE')
logg.debug('config loaded from {}:\n{}'.format(args.c, config))

View File

@@ -9,7 +9,7 @@ import semver
logg = logging.getLogger()
version = (0, 4, 0, 'alpha.10')
version = (0, 4, 0, 'alpha.7')
version_object = semver.VersionInfo(
major=version[0],

View File

@@ -12,11 +12,14 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
-r requirements.txt
COPY . .
COPY . .
RUN python setup.py install
# TODO please review..can this go into requirements?
RUN pip install $pip_extra_index_url_flag .[africastalking,notifylog]
COPY docker/*.sh .
RUN chmod +x *.sh
# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables
@@ -24,5 +27,4 @@ RUN chmod +x *.sh
COPY .config/ /usr/local/etc/cic-notify/
COPY cic_notify/db/migrations/ /usr/local/share/cic-notify/alembic/
ENTRYPOINT []

View File

@@ -11,12 +11,14 @@ RUN pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
-r requirements.txt
COPY . .
COPY . .
RUN python setup.py install
# TODO please review..can this go into requirements?
RUN pip install $pip_extra_index_url_flag .[africastalking,notifylog]
COPY docker/*.sh .
RUN chmod +x *.sh
# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables

View File

@@ -1,3 +1,3 @@
#!/bin/bash
set -e
python scripts/migrate.py -c /usr/local/etc/cic-notify --migrations-dir /usr/local/share/cic-notify/alembic -vv
migrate.py -c /usr/local/etc/cic-notify --migrations-dir /usr/local/share/cic-notify/alembic -vv

View File

@@ -1,5 +1,5 @@
#!/bin/bash
. /root/db.sh
. ./db.sh
/usr/local/bin/cic-notify-tasker -vv $@

View File

@@ -1,7 +1 @@
confini~=0.4.1a1
africastalking==1.2.3
SQLAlchemy==1.3.20
alembic==1.4.2
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
cic_base[full_graph]==0.1.3a3+build.984b5cff

View File

@@ -35,10 +35,9 @@ elif args.v:
config = confini.Config(args.c, args.env_prefix)
config.process()
config.censor('API_KEY', 'AFRICASTALKING')
config.censor('API_USERNAME', 'AFRICASTALKING')
config.censor('PASSWORD', 'DATABASE')
logg.debug('config loaded from {}:\n{}'.format(args.c, config))
#config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))
migrations_dir = os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrations_dir):

View File

@@ -29,11 +29,18 @@ packages =
cic_notify.db
cic_notify.db.models
cic_notify.ext
cic_notify.tasks
cic_notify.tasks.sms
cic_notify.runnable
scripts =
./scripts/migrate.py
scripts/migrate.py
[options.extras_require]
africastalking = africastalking==1.2.3
notifylog = psycopg2==2.8.6
testing =
pytest==6.0.1
pytest-celery==0.0.0a1
pytest-mock==3.3.1
pysqlite3==0.4.3
[options.entry_points]
console_scripts =

View File

@@ -2,4 +2,3 @@ pytest~=6.0.1
pytest-celery~=0.0.0a1
pytest-mock~=3.3.1
pysqlite3~=0.4.3
pytest-cov==2.10.1

View File

@@ -1,7 +1,7 @@
[app]
ALLOWED_IP=0.0.0.0/0
LOCALE_FALLBACK=sw
LOCALE_PATH=var/lib/locale/
LOCALE_FALLBACK=en
LOCALE_PATH=/usr/src/cic-ussd/var/lib/locale/
MAX_BODY_LENGTH=1024
PASSWORD_PEPPER=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I=
SERVICE_CODE=*483*46#,*483*061#,*384*96#
@@ -11,13 +11,13 @@ SUPPORT_PHONE_NUMBER=0757628885
REGION=KE
[ussd]
MENU_FILE=data/ussd_menu.json
MENU_FILE=/usr/src/data/ussd_menu.json
user =
pass =
[statemachine]
STATES=states/
TRANSITIONS=transitions/
STATES=/usr/src/cic-ussd/states/
TRANSITIONS=/usr/src/cic-ussd/transitions/
[client]
host =

View File

@@ -1,5 +1,5 @@
[pgp]
export_dir = pgp/keys/
export_dir = /usr/src/pgp/keys/
keys_path = /usr/src/secrets/
private_keys = privatekeys_meta.asc
passphrase =

View File

@@ -36,7 +36,7 @@ test-mr-cic-ussd:
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes:
- apps/$APP_NAME/**/*
- apps/cic-eth/**/*
when: always
build-push-cic-ussd:

View File

@@ -15,47 +15,45 @@ from cic_ussd.conversions import from_wei
logg = logging.getLogger()
def get_balances(
address: str,
chain_str: str,
token_symbol: str,
asynchronous: bool = False,
callback_param: any = None,
callback_task='cic_ussd.tasks.callback_handler.process_balances_callback') -> Union[celery.Task, dict]:
"""
This function queries cic-eth for an account's balances, It provides a means to receive the balance either
asynchronously or synchronously depending on the provided value for teh asynchronous parameter. It returns a
dictionary containing network, outgoing and incoming balances.
:param address: Ethereum address of the recipient
:type address: str, 0x-hex
:param chain_str: The chain name and network id.
:type chain_str: str
:param callback_param:
:type callback_param:
:param callback_task:
:type callback_task:
:param token_symbol: ERC20 token symbol of the account whose balance is being queried.
:type token_symbol: str
:param asynchronous: Boolean value checking whether to return balances asynchronously.
:type asynchronous: bool
:return:
:rtype:
"""
logg.debug(f'Retrieving balance for address: {address}')
if asynchronous:
cic_eth_api = Api(
chain_str=chain_str,
callback_queue='cic-ussd',
callback_task=callback_task,
callback_param=callback_param
)
cic_eth_api.balance(address=address, token_symbol=token_symbol)
else:
cic_eth_api = Api(chain_str=chain_str)
balance_request_task = cic_eth_api.balance(
address=address,
token_symbol=token_symbol)
return balance_request_task.get()[0]
class BalanceManager:
def __init__(self, address: str, chain_str: str, token_symbol: str):
"""
:param address: Ethereum address of account whose balance is being queried
:type address: str, 0x-hex
:param chain_str: The chain name and network id.
:type chain_str: str
:param token_symbol: ERC20 token symbol of whose balance is being queried
:type token_symbol: str
"""
self.address = address
self.chain_str = chain_str
self.token_symbol = token_symbol
def get_balances(self, asynchronous: bool = False) -> Union[celery.Task, dict]:
"""
This function queries cic-eth for an account's balances, It provides a means to receive the balance either
asynchronously or synchronously depending on the provided value for teh asynchronous parameter. It returns a
dictionary containing network, outgoing and incoming balances.
:param asynchronous: Boolean value checking whether to return balances asynchronously
:type asynchronous: bool
:return:
:rtype:
"""
if asynchronous:
cic_eth_api = Api(
chain_str=self.chain_str,
callback_queue='cic-ussd',
callback_task='cic_ussd.tasks.callback_handler.process_balances_callback',
callback_param=''
)
cic_eth_api.balance(address=self.address, token_symbol=self.token_symbol)
else:
cic_eth_api = Api(chain_str=self.chain_str)
balance_request_task = cic_eth_api.balance(
address=self.address,
token_symbol=self.token_symbol)
return balance_request_task.get()[0]
def compute_operational_balance(balances: dict) -> float:

View File

@@ -127,4 +127,3 @@ class SessionBase(Model):
logg.debug('commit and destroy session {}'.format(session_key))
session.commit()
session.close()
del SessionBase.localsessions[session_key]

View File

@@ -48,6 +48,3 @@ class InitializationError(Exception):
pass
class UnknownUssdRecipient(Exception):
"""Raised when a recipient of a transaction is not known to the ussd application."""

View File

@@ -127,19 +127,14 @@ class MetadataRequestsHandler(Metadata):
if not isinstance(result_data, dict):
raise ValueError(f'Invalid result data object: {result_data}.')
if result.status_code == 200:
if self.cic_type == ':cic.person':
# validate person metadata
person = Person()
person_data = person.deserialize(person_data=result_data)
if result.status_code == 200 and self.cic_type == ':cic.person':
# validate person metadata
person = Person()
person_data = person.deserialize(person_data=result_data)
# format new person data for caching
serialized_person_data = person_data.serialize()
data = json.dumps(serialized_person_data)
else:
data = json.dumps(result_data)
# format new person data for caching
data = json.dumps(person_data.serialize())
# cache metadata
cache_data(key=self.metadata_pointer, data=data)
logg.debug(f'caching: {data} with key: {self.metadata_pointer}')
return result_data

View File

@@ -1,7 +1,6 @@
# standard imports
# external imports
import celery
# local imports
from .base import MetadataRequestsHandler

View File

@@ -12,7 +12,7 @@ from tinydb.table import Document
# local imports
from cic_ussd.account import define_account_tx_metadata, retrieve_account_statement
from cic_ussd.balance import compute_operational_balance, get_balances, get_cached_operational_balance
from cic_ussd.balance import BalanceManager, compute_operational_balance, get_cached_operational_balance
from cic_ussd.chain import Chain
from cic_ussd.db.models.account import Account
from cic_ussd.db.models.base import SessionBase
@@ -182,6 +182,7 @@ def process_transaction_pin_authorization(account: Account, display_key: str, se
token_symbol = retrieve_token_symbol()
user_input = ussd_session.get('session_data').get('transaction_amount')
transaction_amount = to_wei(value=int(user_input))
logg.debug('Requires integration to determine user tokens.')
return process_pin_authorization(
account=account,
display_key=display_key,
@@ -379,9 +380,12 @@ def process_start_menu(display_key: str, user: Account):
token_symbol = retrieve_token_symbol()
chain_str = Chain.spec.__str__()
blockchain_address = user.blockchain_address
balance_manager = BalanceManager(address=blockchain_address,
chain_str=chain_str,
token_symbol=token_symbol)
# get balances synchronously for display on start menu
balances_data = get_balances(address=blockchain_address, chain_str=chain_str, token_symbol=token_symbol)
balances_data = balance_manager.get_balances()
key = create_cached_data_key(
identifier=bytes.fromhex(blockchain_address[2:]),

View File

@@ -28,7 +28,7 @@ elif args.v:
logging.getLogger().setLevel(logging.INFO)
# parse config
config = Config(args.c, env_prefix=args.env_prefix)
config = Config(config_dir=args.c, env_prefix=args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
logg.debug('config loaded from {}:\n{}'.format(args.c, config))

View File

@@ -8,16 +8,13 @@ import tempfile
import celery
import i18n
import redis
from chainlib.chain import ChainSpec
from confini import Config
# local imports
from cic_ussd.chain import Chain
from cic_ussd.db import dsn_from_config
from cic_ussd.db.models.base import SessionBase
from cic_ussd.metadata.signer import Signer
from cic_ussd.metadata.base import Metadata
from cic_ussd.phone_number import Support
from cic_ussd.redis import InMemoryStore
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
from cic_ussd.validator import validate_presence
@@ -85,15 +82,6 @@ Signer.key_file_path = key_file_path
i18n.load_path.append(config.get('APP_LOCALE_PATH'))
i18n.set('fallback', config.get('APP_LOCALE_FALLBACK'))
chain_spec = ChainSpec(
common_name=config.get('CIC_COMMON_NAME'),
engine=config.get('CIC_ENGINE'),
network_id=config.get('CIC_NETWORK_ID')
)
Chain.spec = chain_spec
Support.phone_number = config.get('APP_SUPPORT_PHONE_NUMBER')
# set up celery
current_app = celery.Celery(__name__)

View File

@@ -45,7 +45,7 @@ elif args.v:
logging.getLogger().setLevel(logging.INFO)
# parse config
config = Config(args.c, env_prefix=args.env_prefix)
config = Config(config_dir=args.c, env_prefix=args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
logg.debug('config loaded from {}:\n{}'.format(args.c, config))

View File

@@ -1,4 +1,8 @@
# standard import
import os
import logging
import urllib
import json
# third-party imports
# this must be included for the package to be recognized as a tasks package
@@ -10,5 +14,3 @@ from .logger import *
from .ussd_session import *
from .callback_handler import *
from .metadata import *
from .notifications import *
from .processor import *

View File

@@ -7,14 +7,14 @@ from datetime import datetime, timedelta
import celery
# local imports
from cic_ussd.balance import compute_operational_balance, get_balances
from cic_ussd.chain import Chain
from cic_ussd.conversions import from_wei
from cic_ussd.db.models.base import SessionBase
from cic_ussd.db.models.account import Account
from cic_ussd.account import define_account_tx_metadata
from cic_ussd.error import ActionDataNotFoundError
from cic_ussd.redis import InMemoryStore, cache_data, create_cached_data_key, get_cached_data
from cic_ussd.redis import InMemoryStore, cache_data, create_cached_data_key
from cic_ussd.tasks.base import CriticalSQLAlchemyTask
from cic_ussd.transactions import IncomingTransactionProcessor
logg = logging.getLogger(__file__)
celery_app = celery.current_app
@@ -58,9 +58,9 @@ def process_account_creation_callback(self, result: str, url: str, status_code:
# add phone number metadata lookup
s_phone_pointer = celery.signature(
'cic_ussd.tasks.metadata.add_phone_pointer',
[result, phone_number]
)
'cic_ussd.tasks.metadata.add_phone_pointer',
[result, phone_number]
)
s_phone_pointer.apply_async(queue=queue)
# add custom metadata tags
@@ -87,106 +87,59 @@ def process_account_creation_callback(self, result: str, url: str, status_code:
session.close()
@celery_app.task(bind=True)
def process_transaction_callback(self, result: dict, param: str, status_code: int):
@celery_app.task
def process_incoming_transfer_callback(result: dict, param: str, status_code: int):
session = SessionBase.create_session()
if status_code == 0:
chain_str = Chain.spec.__str__()
# collect transaction metadata
destination_token_symbol = result.get('destination_token_symbol')
destination_token_value = result.get('destination_token_value')
# collect result data
recipient_blockchain_address = result.get('recipient')
sender_blockchain_address = result.get('sender')
source_token_symbol = result.get('source_token_symbol')
source_token_value = result.get('source_token_value')
token_symbol = result.get('destination_token_symbol')
value = result.get('destination_token_value')
# build stakeholder callback params
recipient_metadata = {
"token_symbol": destination_token_symbol,
"token_value": destination_token_value,
"blockchain_address": recipient_blockchain_address,
"tag": "recipient",
"tx_param": param
}
# try to find users in system
recipient_user = session.query(Account).filter_by(blockchain_address=recipient_blockchain_address).first()
sender_user = session.query(Account).filter_by(blockchain_address=sender_blockchain_address).first()
# retrieve account balances
get_balances(
address=recipient_blockchain_address,
callback_param=recipient_metadata,
chain_str=chain_str,
callback_task='cic_ussd.tasks.callback_handler.process_transaction_balances_callback',
token_symbol=destination_token_symbol,
asynchronous=True)
# only retrieve sender if transaction is a transfer
if param == 'transfer':
sender_metadata = {
"blockchain_address": sender_blockchain_address,
"token_symbol": source_token_symbol,
"token_value": source_token_value,
"tag": "sender",
"tx_param": param
}
get_balances(
address=sender_blockchain_address,
callback_param=sender_metadata,
chain_str=chain_str,
callback_task='cic_ussd.tasks.callback_handler.process_transaction_balances_callback',
token_symbol=source_token_symbol,
asynchronous=True)
else:
raise ValueError(f'Unexpected status code: {status_code}.')
@celery_app.task(bind=True)
def process_transaction_balances_callback(self, result: list, param: dict, status_code: int):
queue = self.request.delivery_info.get('routing_key')
if status_code == 0:
# retrieve balance data
balances_data = result[0]
operational_balance = compute_operational_balance(balances=balances_data)
# retrieve account's address
blockchain_address = param.get('blockchain_address')
# append balance to transaction metadata
transaction_metadata = param
transaction_metadata['operational_balance'] = operational_balance
# retrieve account's preferences
s_preferences_metadata = celery.signature(
'cic_ussd.tasks.metadata.query_preferences_metadata',
[blockchain_address],
queue=queue
)
# parse metadata and run validations
s_process_account_metadata = celery.signature(
'cic_ussd.tasks.processor.process_tx_metadata_for_notification',
[transaction_metadata],
queue=queue
)
# issue notification of transaction
s_notify_account = celery.signature(
'cic_ussd.tasks.notifications.notify_account_of_transaction',
queue=queue
)
if param.get('tx_param') == 'transfer':
celery.chain(s_preferences_metadata, s_process_account_metadata, s_notify_account).apply_async()
if param.get('tx_param') == 'tokengift':
s_process_account_metadata = celery.signature(
'cic_ussd.tasks.processor.process_tx_metadata_for_notification',
[{}, transaction_metadata],
queue=queue
# check whether recipient is in the system
if not recipient_user:
session.close()
raise ValueError(
f'Tx for recipient: {recipient_blockchain_address} was received but has no matching user in the system.'
)
celery.chain(s_process_account_metadata, s_notify_account).apply_async()
# process incoming transactions
incoming_tx_processor = IncomingTransactionProcessor(phone_number=recipient_user.phone_number,
preferred_language=recipient_user.preferred_language,
token_symbol=token_symbol,
value=value)
if param == 'tokengift':
incoming_tx_processor.process_token_gift_incoming_transactions()
elif param == 'transfer':
if sender_user:
sender_information = define_account_tx_metadata(user=sender_user)
incoming_tx_processor.process_transfer_incoming_transaction(
sender_information=sender_information,
recipient_blockchain_address=recipient_blockchain_address
)
else:
logg.warning(
f'Tx with sender: {sender_blockchain_address} was received but has no matching user in the system.'
)
incoming_tx_processor.process_transfer_incoming_transaction(
sender_information='GRASSROOTS ECONOMICS',
recipient_blockchain_address=recipient_blockchain_address
)
else:
session.close()
raise ValueError(f'Unexpected transaction param: {param}.')
else:
session.close()
raise ValueError(f'Unexpected status code: {status_code}.')
session.close()
@celery_app.task
def process_balances_callback(result: list, param: str, status_code: int):
@@ -198,7 +151,6 @@ def process_balances_callback(result: list, param: str, status_code: int):
salt=':cic.balances_data'
)
cache_data(key=key, data=json.dumps(balances_data))
logg.debug(f'caching: {balances_data} with key: {key}')
else:
raise ValueError(f'Unexpected status code: {status_code}.')

View File

@@ -26,7 +26,6 @@ def query_person_metadata(blockchain_address: str):
:rtype:
"""
identifier = blockchain_address_to_metadata_pointer(blockchain_address=blockchain_address)
logg.debug(f'Retrieving person metadata for address: {blockchain_address}.')
person_metadata_client = PersonMetadata(identifier=identifier)
person_metadata_client.query()
@@ -73,15 +72,3 @@ def add_preferences_metadata(blockchain_address: str, data: dict):
identifier = blockchain_address_to_metadata_pointer(blockchain_address=blockchain_address)
custom_metadata_client = PreferencesMetadata(identifier=identifier)
custom_metadata_client.create(data=data)
@celery_app.task()
def query_preferences_metadata(blockchain_address: str):
"""This method retrieves preferences metadata based on an account's blockchain address.
:param blockchain_address: Blockchain address of an account.
:type blockchain_address: str | 0x-hex
"""
identifier = blockchain_address_to_metadata_pointer(blockchain_address=blockchain_address)
logg.debug(f'Retrieving preferences metadata for address: {blockchain_address}.')
person_metadata_client = PreferencesMetadata(identifier=identifier)
return person_metadata_client.query()

View File

@@ -1,70 +0,0 @@
# standard imports
import datetime
import logging
# third-party imports
import celery
# local imports
from cic_ussd.notifications import Notifier
from cic_ussd.phone_number import Support
celery_app = celery.current_app
logg = logging.getLogger(__file__)
notifier = Notifier()
@celery_app.task
def notify_account_of_transaction(notification_data: dict):
    """Sends the SMS notification matching a processed transaction, selecting the
    message template from the transaction type and the account's role in it.
    :param notification_data: Pre-collected notification attributes: account_tx_role,
    amount, balance, phone_number, preferred_language, token_symbol,
    transaction_account_metadata and transaction_type.
    :type notification_data: dict
    :return: None
    :rtype: None
    """
    account_tx_role = notification_data.get('account_tx_role')
    amount = notification_data.get('amount')
    balance = notification_data.get('balance')
    phone_number = notification_data.get('phone_number')
    preferred_language = notification_data.get('preferred_language')
    token_symbol = notification_data.get('token_symbol')
    transaction_account_metadata = notification_data.get('transaction_account_metadata')
    transaction_type = notification_data.get('transaction_type')
    # NOTE(review): '%H' (24-hour) combined with '%p' (AM/PM) — '%I' was likely
    # intended; confirm the desired timestamp format.
    timestamp = datetime.datetime.now().strftime('%d-%m-%y, %H:%M %p')
    # a token gift marks successful account creation, so the welcome SMS is sent
    if transaction_type == 'tokengift':
        support_phone = Support.phone_number
        notifier.send_sms_notification(
            key='sms.account_successfully_created',
            phone_number=phone_number,
            preferred_language=preferred_language,
            balance=balance,
            support_phone=support_phone,
            token_symbol=token_symbol
        )
    # transfers notify either end of the transaction with the counterparty's details
    if transaction_type == 'transfer':
        if account_tx_role == 'recipient':
            notifier.send_sms_notification(
                key='sms.received_tokens',
                phone_number=phone_number,
                preferred_language=preferred_language,
                amount=amount,
                token_symbol=token_symbol,
                tx_sender_information=transaction_account_metadata,
                timestamp=timestamp,
                balance=balance
            )
        else:
            notifier.send_sms_notification(
                key='sms.sent_tokens',
                phone_number=phone_number,
                preferred_language=preferred_language,
                amount=amount,
                token_symbol=token_symbol,
                tx_recipient_information=transaction_account_metadata,
                timestamp=timestamp,
                balance=balance
            )

View File

@@ -1,88 +0,0 @@
# standard imports
import logging
# third-party imports
import celery
from i18n import config
# local imports
from cic_ussd.account import define_account_tx_metadata
from cic_ussd.db.models.account import Account
from cic_ussd.db.models.base import SessionBase
from cic_ussd.error import UnknownUssdRecipient
from cic_ussd.transactions import from_wei
celery_app = celery.current_app
logg = logging.getLogger(__file__)
@celery_app.task
def process_tx_metadata_for_notification(result: celery.Task, transaction_metadata: dict):
    """Builds the notification payload for a transaction by combining an account's
    preferences metadata with the transaction's metadata.
    :param result: Preferences metadata returned by the preceding
    query_preferences_metadata task in the chain.
    :type result: celery.Task
    :param transaction_metadata: Transaction attributes: blockchain_address, tag,
    token_value, token_symbol, tx_param and operational_balance.
    :type transaction_metadata: dict
    :raises UnknownUssdRecipient: If a transaction's recipient has no matching
    account in the ussd system.
    :return: Notification data for the downstream notification task.
    :rtype: dict
    """
    notification_data = {}
    # get preferred language, falling back to the i18n default when unset
    preferred_language = result.get('preferred_language')
    if not preferred_language:
        preferred_language = config.get('fallback')
    notification_data['preferred_language'] = preferred_language
    # validate account information against present ussd storage data.
    session = SessionBase.create_session()
    blockchain_address = transaction_metadata.get('blockchain_address')
    tag = transaction_metadata.get('tag')
    account = session.query(Account).filter_by(blockchain_address=blockchain_address).first()
    # close the session as soon as the query result is in hand; previously it was
    # only closed on the error path, leaking it on every successful run.
    session.close()
    if not account and tag == 'recipient':
        raise UnknownUssdRecipient(
            f'Tx for recipient: {blockchain_address} was received but has no matching user in the system.'
        )
    # NOTE(review): an unknown *sender* account falls through here and raises
    # AttributeError below — confirm whether that should be handled explicitly.
    # get phone number associated with account
    notification_data['phone_number'] = account.phone_number
    # get account's role in transaction i.e sender / recipient
    tx_param = transaction_metadata.get('tx_param')
    notification_data['transaction_type'] = tx_param
    # token amount and symbol are tag-independent; the original duplicated this
    # block identically in both branches of an if/else on the tag.
    notification_data['account_tx_role'] = tag
    notification_data['amount'] = from_wei(value=transaction_metadata.get('token_value'))
    notification_data['token_symbol'] = transaction_metadata.get('token_symbol')
    # get account's standard ussd identification pattern
    if tx_param == 'transfer':
        notification_data['transaction_account_metadata'] = define_account_tx_metadata(user=account)
        if tag == 'recipient':
            notification_data['notification_key'] = 'sms.received_tokens'
        else:
            notification_data['notification_key'] = 'sms.sent_tokens'
    if tx_param == 'tokengift':
        notification_data['notification_key'] = 'sms.account_successfully_created'
    # get account's balance
    notification_data['balance'] = transaction_metadata.get('operational_balance')
    return notification_data

View File

@@ -7,9 +7,9 @@ from datetime import datetime
from cic_eth.api import Api
# local imports
from cic_ussd.balance import get_balances, get_cached_operational_balance
from cic_ussd.balance import get_cached_operational_balance
from cic_ussd.notifications import Notifier
from cic_ussd.phone_number import Support
logg = logging.getLogger()
notifier = Notifier()
@@ -50,6 +50,61 @@ def to_wei(value: int) -> int:
return int(value * 1e+6)
class IncomingTransactionProcessor:
    """Collects recipient-facing data for an incoming on-chain transaction and
    issues the corresponding SMS notification."""

    def __init__(self, phone_number: str, preferred_language: str, token_symbol: str, value: int):
        """
        :param phone_number: The recipient's phone number.
        :type phone_number: str
        :param preferred_language: The user's preferred language.
        :type preferred_language: str
        :param token_symbol: The symbol for the token the recipient receives.
        :type token_symbol: str
        :param value: The amount of tokens received in the transactions.
        :type value: int
        """
        self.phone_number = phone_number
        self.preferred_language = preferred_language
        self.token_symbol = token_symbol
        self.value = value

    def process_token_gift_incoming_transactions(self):
        """This function processes incoming transactions with a "tokengift" param, it collects all appropriate data to
        send out notifications to users when their accounts are successfully created.
        """
        # the gifted amount doubles as the new account's opening balance
        balance = from_wei(value=self.value)
        key = 'sms.account_successfully_created'
        notifier.send_sms_notification(key=key,
                                       phone_number=self.phone_number,
                                       preferred_language=self.preferred_language,
                                       balance=balance,
                                       token_symbol=self.token_symbol)

    def process_transfer_incoming_transaction(self, sender_information: str, recipient_blockchain_address: str):
        """This function processes incoming transactions with the "transfer" param and issues notifications to users
        about reception of funds into their accounts.
        :param sender_information: A string with a user's full name and phone number.
        :type sender_information: str
        :param recipient_blockchain_address: Blockchain address of the recipient's account.
        :type recipient_blockchain_address: str
        """
        key = 'sms.received_tokens'
        amount = from_wei(value=self.value)
        # NOTE(review): '%H' (24-hour) combined with '%p' (AM/PM) — '%I' was likely
        # intended; confirm the desired timestamp format.
        timestamp = datetime.now().strftime('%d-%m-%y, %H:%M %p')
        # balance shown is read from the redis cache and may lag the on-chain
        # balance — presumably refreshed by the balances callback; confirm.
        operational_balance = get_cached_operational_balance(blockchain_address=recipient_blockchain_address)
        notifier.send_sms_notification(key=key,
                                       phone_number=self.phone_number,
                                       preferred_language=self.preferred_language,
                                       amount=amount,
                                       token_symbol=self.token_symbol,
                                       tx_sender_information=sender_information,
                                       timestamp=timestamp,
                                       balance=operational_balance)
class OutgoingTransactionProcessor:
def __init__(self, chain_str: str, from_address: str, to_address: str):
@@ -61,7 +116,6 @@ class OutgoingTransactionProcessor:
:param to_address: Ethereum address of the recipient
:type to_address: str, 0x-hex
"""
self.chain_str = chain_str
self.cic_eth_api = Api(chain_str=chain_str)
self.from_address = from_address
self.to_address = to_address

View File

@@ -4,4 +4,4 @@
user_server_port=${SERVER_PORT:-9500}
/usr/local/bin/uwsgi --wsgi-file cic_ussd/runnable/daemons/cic_user_server.py --http :"$user_server_port" --pyargv "$@"
/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/daemons/cic_user_server.py --http :"$user_server_port" --pyargv "$@"

View File

@@ -4,4 +4,4 @@
user_ussd_server_port=${SERVER_PORT:-9000}
/usr/local/bin/uwsgi --wsgi-file cic_ussd/runnable/daemons/cic_user_ussd_server.py --http :"$user_ussd_server_port" --pyargv "$@"
/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/daemons/cic_user_ussd_server.py --http :"$user_ussd_server_port" --pyargv "$@"

View File

@@ -1,17 +1,4 @@
cic-eth~=0.12.2a3
cic-notify~=0.4.0a10
cic-types~=0.1.0a14
confini~=0.4.1a1
semver==2.13.0
alembic==1.4.2
SQLAlchemy==1.3.20
psycopg2==2.8.6
tinydb==4.2.0
phonenumbers==8.12.12
redis==3.5.3
celery==4.4.7
python-i18n[YAML]==0.3.9
pyxdg==0.27
bcrypt==3.2.0
uWSGI==2.0.19.1
transitions==0.8.4
cic_base[full_graph]==0.1.3a3+build.984b5cff
cic-eth~=0.12.0a1
cic-notify~=0.4.0a7
cic-types~=0.1.0a11

View File

@@ -30,7 +30,7 @@ arg_parser.add_argument('-vv', action='store_true', help='be more verbose')
args = arg_parser.parse_args()
config = Config(args.c, env_prefix=args.env_prefix)
config = Config(config_dir=args.c, env_prefix=args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
logg.debug(f'config:\n{config}')

View File

@@ -3,7 +3,5 @@ en:
You have been registered on Sarafu Network! To use dial *384*96# on Safaricom and *483*96# on other networks. For help %{support_phone}.
received_tokens: |-
Successfully received %{amount} %{token_symbol} from %{tx_sender_information} %{timestamp}. New balance is %{balance} %{token_symbol}.
sent_tokens: |-
Successfully sent %{amount} %{token_symbol} to %{tx_recipient_information} %{timestamp}. New balance is %{balance} %{token_symbol}.
terms: |-
By using the service, you agree to the terms and conditions at http://grassecon.org/tos

View File

@@ -2,8 +2,6 @@ sw:
account_successfully_created: |-
Umesajiliwa kwa huduma ya Sarafu! Kutumia bonyeza *384*96# Safaricom ama *483*46# kwa utandao tofauti. Kwa Usaidizi %{support_phone}.
received_tokens: |-
Umepokea %{amount} %{token_symbol} kutoka kwa %{tx_sender_information} %{timestamp}. Salio lako ni %{balance} %{token_symbol}.
sent_tokens: |-
Umetuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp}. Salio lako ni %{balance} %{token_symbol}.
Umepokea %{amount} %{token_symbol} kutoka kwa %{tx_sender_information} %{timestamp}. Salio la %{token_symbol} ni %{balance}.
terms: |-
Kwa kutumia hii huduma, umekubali sheria na masharti yafuatayo http://grassecon.org/tos

View File

@@ -1,6 +1,4 @@
.git
.cache
.dot
**/doc
**/.venv
**/venv
**/doc

View File

@@ -0,0 +1,2 @@
[bancor]
dir = /usr/local/share/cic/bancor

View File

@@ -1,2 +0,0 @@
[chain]
spec = evm:ethereum:1

View File

@@ -1,4 +0,0 @@
[redis]
host = localhost
port = 63379
db = 0

View File

@@ -1,5 +0,0 @@
[rpc]
http_provider = http://localhost:8545
http_authentication =
http_username =
http_password =

View File

@@ -1,8 +0,0 @@
[token]
name =
symbol =
decimals =
demurrage_level =
redistribution_period =
sink_address =
supply_limit = 0

View File

@@ -1,3 +0,0 @@
[wallet]
key_file =
passphrase =

View File

@@ -14,18 +14,17 @@ RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 2A518C819BE37D2C20
RUN mkdir -vp /usr/local/etc/cic
ENV CONFINI_DIR /usr/local/etc/cic/
#RUN mkdir -vp $CONFINI_DIR
RUN mkdir -vp $CONFINI_DIR
#ARG cic_config_commit=24287fb253196820f23ff8a7177b122f2cd99a11
#ARG cic_config_url=https://gitlab.com/grassrootseconomics/cic-config.git/
#RUN echo Install confini schema files && \
# git clone --depth 1 $cic_config_url cic-config && \
# cd cic-config && \
# git fetch --depth 1 origin $cic_config_commit && \
# git checkout $cic_config_commit && \
# cp -v *.ini $CONFINI_DIR
ARG cic_config_commit=0abe0867f18077907c7023bf0ef5e466a3984dd8
ARG cic_config_url=https://gitlab.com/grassrootseconomics/cic-config.git/
RUN echo Install confini schema files && \
git clone --depth 1 $cic_config_url cic-config && \
cd cic-config && \
git fetch --depth 1 origin $cic_config_commit && \
git checkout $cic_config_commit && \
cp -v *.ini $CONFINI_DIR
COPY config_template/ /usr/local/etc/cic/
COPY requirements.txt .
ARG pip_index_url=https://pypi.org/simple

View File

@@ -1,10 +1,5 @@
cic-eth[tools]==0.12.2a3
eth-erc20>=0.0.11a1,<0.1.0
erc20-demurrage-token>=0.0.2a5,<0.1.0
eth-address-index>=0.1.3a1,<0.2.0
eth-accounts-index>=0.0.13a1,<0.1.0
cic-eth-registry>=0.5.7a1,<=0.6.0
erc20-faucet>=0.2.3a1,<0.3.0
erc20-transfer-authorization>=0.3.3a1,<0.4.0
sarafu-faucet>=0.0.4a4,<0.1.0
chainlib-eth>=0.0.6a1,<0.1.0
cic_base[full]==0.1.3a4+build.ce68c833
sarafu-faucet~=0.0.4a1
cic-eth[tools]==0.12.0a2
eth-erc20~=0.0.10a3
erc20-demurrage-token==0.0.2a3

View File

@@ -3,9 +3,14 @@
set -a
default_token=giftable_erc20_token
CIC_DEFAULT_TOKEN_SYMBOL=${CIC_DEFAULT_TOKEN_SYMBOL:-GFT}
TOKEN_SYMBOL=${CIC_DEFAULT_TOKEN_SYMBOL}
TOKEN_NAME=${TOKEN_NAME}
TOKEN_NAME=${TOKEN_NAME:-$TOKEN_SYMBOL}
TOKEN_TYPE=${TOKEN_TYPE:-$default_token}
if [ $TOKEN_TYPE == 'default' ]; then
>&2 echo resolving "default" token to $default_token
TOKEN_TYPE=$default_token
fi
cat <<EOF
external token settings:
token_type: $TOKEN_TYPE
@@ -18,6 +23,7 @@ token_supply_limit: $TOKEN_SUPPLY_LIMIT
EOF
CIC_CHAIN_SPEC=${CIC_CHAIN_SPEC:-evm:bloxberg:8995}
TOKEN_SYMBOL=${TOKEN_SYMBOL:-GFT}
DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
DEV_RESERVE_AMOUNT=${DEV_ETH_RESERVE_AMOUNT:-""10000000000000000000000000000000000}
@@ -69,34 +75,17 @@ if [[ -n "${ETH_PROVIDER}" ]]; then
./wait-for-it.sh "${ETH_PROVIDER_HOST}:${ETH_PROVIDER_PORT}"
fi
if [ "$TOKEN_TYPE" == "$default_token" ]; then
if [ -z "$TOKEN_SYMBOL" ]; then
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
TOKEN_SYMBOL="GFT"
TOKEN_NAME="Giftable Token"
elif [ -z "$TOKEN_NAME" ]; then
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
TOKEN_NAME=$TOKEN_SYMBOL
fi
if [ $TOKEN_TYPE == $default_token ]; then
>&2 echo deploying default token $TOKEN_TYPE
echo giftable-token-deploy $gas_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv
DEV_RESERVE_ADDRESS=`giftable-token-deploy $gas_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv`
elif [ "$TOKEN_TYPE" == "erc20_demurrage_token" ]; then
if [ -z "$TOKEN_SYMBOL" ]; then
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
TOKEN_SYMBOL="SARAFU"
TOKEN_NAME="Sarafu Token"
elif [ -z "$TOKEN_NAME" ]; then
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
TOKEN_NAME=$TOKEN_SYMBOL
fi
DEV_RESERVE_ADDRESS=`giftable-token-deploy $gas_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -ww --name $TOKEN_NAME --symbol $TOKEN_SYMBOL --decimals 6 -vv`
elif [ $TOKEN_TYPE == 'erc20_demurrage_token' ]; then
>&2 echo deploying token $TOKEN_TYPE
if [ -z $TOKEN_SINK_ADDRESS ]; then
if [ ! -z $TOKEN_REDISTRIBUTION_PERIOD ]; then
>&2 echo -e "\033[;93mtoken sink address not set, so redistribution will be BURNED\033[;39m"
fi
fi
DEV_RESERVE_ADDRESS=`erc20-demurrage-token-deploy $gas_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL -vv -ww`
DEV_RESERVE_ADDRESS=`erc20-demurrage-token-deploy $gas_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -ww`
else
>&2 echo unknown token type $TOKEN_TYPE
exit 1
@@ -148,7 +137,6 @@ else
fi
mkdir -p $CIC_DATA_DIR
>&2 echo using data dir $CIC_DATA_DIR for environment variable dump
# this is consumed in downstream services to set environment variables
cat << EOF > $CIC_DATA_DIR/.env
@@ -158,7 +146,6 @@ export CIC_DECLARATOR_ADDRESS=$CIC_DECLARATOR_ADDRESS
EOF
cat ./envlist | bash from_env.sh > $CIC_DATA_DIR/.env_all
cat ./envlist
# popd
set +a

View File

@@ -15,6 +15,10 @@ CIC_DATA_DIR=${CIC_DATA_DIR:-/tmp/cic}
ETH_PASSPHRASE=''
CIC_DEFAULT_TOKEN_SYMBOL=${CIC_DEFAULT_TOKEN_SYMBOL:-GFT}
TOKEN_SYMBOL=$CIC_DEFAULT_TOKEN_SYMBOL
if [[ $CIC_DEFAULT_TOKEN_SYMBOL != 'GFT' && $CIC_DEFAULT_TOKEN_SYMBOL != 'SRF' ]]; then
>&2 echo CIC_DEFAULT_TOKEN_SYMBOL must be one of [GFT,SRF], but was $CIC_DEFAULT_TOKEN_SYMBOL
exit 1
fi
# Debug flag
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
@@ -44,18 +48,18 @@ EOF
>&2 echo "create account for gas gifter"
old_gas_provider=$DEV_ETH_ACCOUNT_GAS_PROVIDER
DEV_ETH_ACCOUNT_GAS_GIFTER=`cic-eth-create --timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
DEV_ETH_ACCOUNT_GAS_GIFTER=`cic-eth-create --timeout 120 $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_GAS_GIFTER >> $env_out_file
cic-eth-tag -i $CIC_CHAIN_SPEC GAS_GIFTER $DEV_ETH_ACCOUNT_GAS_GIFTER
>&2 echo "create account for sarafu gifter"
DEV_ETH_ACCOUNT_SARAFU_GIFTER=`cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
DEV_ETH_ACCOUNT_SARAFU_GIFTER=`cic-eth-create $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_SARAFU_GIFTER=$DEV_ETH_ACCOUNT_SARAFU_GIFTER >> $env_out_file
cic-eth-tag -i $CIC_CHAIN_SPEC SARAFU_GIFTER $DEV_ETH_ACCOUNT_SARAFU_GIFTER
>&2 echo "create account for approval escrow owner"
DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=`cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=`cic-eth-create $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=$DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER >> $env_out_file
cic-eth-tag -i $CIC_CHAIN_SPEC TRANSFER_AUTHORIZATION_OWNER $DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER
@@ -65,23 +69,21 @@ cic-eth-tag -i $CIC_CHAIN_SPEC TRANSFER_AUTHORIZATION_OWNER $DEV_ETH_ACCOUNT_TRA
#cic-eth-tag FAUCET_GIFTER $DEV_ETH_ACCOUNT_FAUCET_OWNER
>&2 echo "create account for accounts index writer"
DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=`cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=`cic-eth-create $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=$DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER >> $env_out_file
cic-eth-tag -i $CIC_CHAIN_SPEC ACCOUNT_REGISTRY_WRITER $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
>&2 echo "add acccounts index writer account as writer on contract"
eth-accounts-index-writer -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_ACCOUNT_INDEX_ADDRESS -ww $debug $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
# Transfer gas to custodial gas provider adddress
_CONFINI_DIR=$CONFINI_DIR
unset CONFINI_DIR
>&2 echo gift gas to gas gifter
>&2 eth-gas --send -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_GAS_GIFTER $gas_amount
>&2 eth-gas --send -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_ACCOUNT_GAS_GIFTER $gas_amount
>&2 echo gift gas to sarafu token owner
>&2 eth-gas --send -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER $gas_amount
>&2 eth-gas --send -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_ACCOUNT_SARAFU_GIFTER $gas_amount
>&2 echo gift gas to account index owner
>&2 eth-gas --send -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER $gas_amount
>&2 eth-gas --send -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER $gas_amount
# Send token to token creator
@@ -102,7 +104,6 @@ export DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS
#echo -n 0 > $init_level_file
CONFINI_DIR=$_CONFINI_DIR
# Remove the SEND (8), QUEUE (16) and INIT (2) locks (or'ed), set by default at migration
cic-eth-ctl -i :: unlock INIT
cic-eth-ctl -i :: unlock SEND

View File

@@ -1,8 +1,4 @@
.git
.cache
.dot
**/doc
node_modules/
**/venv
**/.venv
**/doc

View File

@@ -167,7 +167,7 @@ If you have previously run the `cic_ussd` import incompletely, it could be a goo
Then, in sequence, run in first terminal:
`python cic_ussd/import_balance.py -v -c config -p <eth_provider> -r <cic_registry_address> --token-symbol <token_symbol> -y ../contract-migration/keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c out`
`python cic_eth/import_balance.py -v -c config -p <eth_provider> -r <cic_registry_address> --token-symbol <token_symbol> -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c out`
In second terminal:

View File

@@ -11,6 +11,7 @@ import csv
import json
# external imports
import eth_abi
import confini
from hexathon import (
strip_0x,
@@ -28,10 +29,7 @@ from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import TxFactory
from chainlib.jsonrpc import JSONRPCRequest
from chainlib.eth.error import (
EthException,
RequestMismatchException,
)
from chainlib.eth.error import EthException
from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
@@ -39,9 +37,6 @@ from crypto_dev_signer.keystore.dict import DictKeystore
from cic_types.models.person import Person
from eth_erc20 import ERC20
from cic_base.eth.syncer import chain_interface
from eth_accounts_index import AccountsIndex
from eth_contract_registry import Registry
from eth_token_index import TokenUniqueSymbolIndex
logging.basicConfig(level=logging.WARNING)
@@ -136,52 +131,58 @@ class Handler:
logg.debug('no payload, skipping {}'.format(tx))
return
recipient = None
try:
r = AccountsIndex.parse_add_request(tx.payload)
except RequestMismatchException:
return
recipient = r[0]
user_file = 'new/{}/{}/{}.json'.format(
recipient[2:4].upper(),
recipient[4:6].upper(),
recipient[2:].upper(),
)
filepath = os.path.join(self.user_dir, user_file)
o = None
try:
f = open(filepath, 'r')
o = json.load(f)
if tx.payload[:8] == self.account_index_add_signature:
recipient = eth_abi.decode_single('address', bytes.fromhex(tx.payload[-64:]))
#original_address = to_checksum_address(self.addresses[to_checksum_address(recipient)])
user_file = 'new/{}/{}/{}.json'.format(
recipient[2:4].upper(),
recipient[4:6].upper(),
recipient[2:].upper(),
)
filepath = os.path.join(self.user_dir, user_file)
o = None
try:
f = open(filepath, 'r')
o = json.load(f)
f.close()
except FileNotFoundError:
logg.error('no import record of address {}'.format(recipient))
return
u = Person.deserialize(o)
original_address = u.identities[old_chain_spec.engine()]['{}:{}'.format(old_chain_spec.common_name(), old_chain_spec.network_id())][0]
try:
balance = self.balances[original_address]
except KeyError as e:
logg.error('balance get fail orig {} new {}'.format(original_address, recipient))
return
# TODO: store token object in handler ,get decimals from there
multiplier = 10**6
balance_full = balance * multiplier
logg.info('registered {} originally {} ({}) tx hash {} balance {}'.format(recipient, original_address, u, tx.hash, balance_full))
(tx_hash_hex, o) = self.tx_factory.transfer(self.token_address, signer_address, recipient, balance_full)
logg.info('submitting erc20 transfer tx {} for recipient {}'.format(tx_hash_hex, recipient))
r = conn.do(o)
tx_path = os.path.join(
user_dir,
'txs',
strip_0x(tx_hash_hex),
)
f = open(tx_path, 'w')
f.write(strip_0x(o['params'][0]))
f.close()
except FileNotFoundError:
logg.error('no import record of address {}'.format(recipient))
return
u = Person.deserialize(o)
original_address = u.identities[old_chain_spec.engine()]['{}:{}'.format(old_chain_spec.common_name(), old_chain_spec.network_id())][0]
try:
balance = self.balances[original_address]
except KeyError as e:
logg.error('balance get fail orig {} new {}'.format(original_address, recipient))
return
# TODO: store token object in handler ,get decimals from there
multiplier = 10**6
balance_full = balance * multiplier
logg.info('registered {} originally {} ({}) tx hash {} balance {}'.format(recipient, original_address, u, tx.hash, balance_full))
(tx_hash_hex, o) = self.tx_factory.transfer(self.token_address, signer_address, recipient, balance_full)
logg.info('submitting erc20 transfer tx {} for recipient {}'.format(tx_hash_hex, recipient))
r = conn.do(o)
tx_path = os.path.join(
user_dir,
'txs',
strip_0x(tx_hash_hex),
)
f = open(tx_path, 'w')
f.write(strip_0x(o['params'][0]))
f.close()
# except TypeError as e:
# logg.warning('typerror {}'.format(e))
# pass
# except IndexError as e:
# logg.warning('indexerror {}'.format(e))
# pass
# except EthException as e:
# logg.error('send error {}'.format(e).ljust(200))
#except KeyError as e:
# logg.error('key record not found in imports: {}'.format(e).ljust(200))
def progress_callback(block_number, tx_index):
@@ -197,26 +198,49 @@ def main():
nonce_oracle = RPCNonceOracle(signer_address, conn)
# Get Token registry address
registry = Registry(chain_spec)
o = registry.address_of(config.get('CIC_REGISTRY_ADDRESS'), 'TokenRegistry')
txf = TxFactory(chain_spec, signer=signer, gas_oracle=gas_oracle, nonce_oracle=None)
tx = txf.template(signer_address, config.get('CIC_REGISTRY_ADDRESS'))
registry_addressof_method = keccak256_string_to_hex('addressOf(bytes32)')[:8]
data = add_0x(registry_addressof_method)
data += eth_abi.encode_single('bytes32', b'TokenRegistry').hex()
txf.set_code(tx, data)
j = JSONRPCRequest()
o = j.template()
o['method'] = 'eth_call'
o['params'].append(txf.normalize(tx))
o['params'].append('latest')
o = j.finalize(o)
r = conn.do(o)
token_index_address = registry.parse_address_of(r)
token_index_address = to_checksum_address(token_index_address)
token_index_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
logg.info('found token index address {}'.format(token_index_address))
# Get Sarafu token address
token_index = TokenUniqueSymbolIndex(chain_spec)
o = token_index.address_of(token_index_address, token_symbol)
tx = txf.template(signer_address, token_index_address)
data = add_0x(registry_addressof_method)
h = hashlib.new('sha256')
h.update(token_symbol.encode('utf-8'))
z = h.digest()
data += eth_abi.encode_single('bytes32', z).hex()
txf.set_code(tx, data)
o = j.template()
o['method'] = 'eth_call'
o['params'].append(txf.normalize(tx))
o['params'].append('latest')
o = j.finalize(o)
r = conn.do(o)
token_address = token_index.parse_address_of(r)
try:
token_address = to_checksum_address(token_address)
sarafu_token_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
except ValueError as e:
logg.critical('lookup failed for token {}: {}'.format(token_symbol, e))
sys.exit(1)
logg.info('found token address {}'.format(token_address))
sys.exit(0)
if sarafu_token_address == ZERO_ADDRESS:
raise KeyError('token address for symbol {} is zero'.format(token_symbol))
logg.info('found token address {}'.format(sarafu_token_address))
syncer_backend = MemBackend(chain_str, 0)
@@ -224,6 +248,22 @@ def main():
o = block_latest()
r = conn.do(o)
block_offset = int(strip_0x(r), 16) + 1
#
# addresses = {}
# f = open('{}/addresses.csv'.format(user_dir, 'r'))
# while True:
# l = f.readline()
# if l == None:
# break
# r = l.split(',')
# try:
# k = r[0]
# v = r[1].rstrip()
# addresses[k] = v
# sys.stdout.write('loading address mapping {} -> {}'.format(k, v).ljust(200) + "\r")
# except IndexError as e:
# break
# f.close()
# TODO get decimals from token
balances = {}

View File

@@ -11,6 +11,7 @@ const config = new crdt.Config('./config');
config.process();
console.log(config);
function sendit(uid, envelope) {
const d = envelope.toJSON();

View File

@@ -17,7 +17,7 @@ default_config_dir = '/usr/local/etc/cic'
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config file')
arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='Task queue')
arg_parser.add_argument('-q', type=str, default='cic-eth', help='Task queue')
arg_parser.add_argument('-v', action='store_true', help='Be verbose')
arg_parser.add_argument('-vv', action='store_true', help='Be more verbose')
arg_parser.add_argument('user_dir', type=str, help='path to users export dir tree')

View File

@@ -206,7 +206,7 @@ def gen():
# fake.local_latitude()
p.location['latitude'] = (random.random() * 180) - 90
# fake.local_latitude()
p.location['longitude'] = (random.random() * 360) - 180
p.location['longitude'] = (random.random() * 360) - 179
return (old_blockchain_checksum_address, phone, p)

View File

@@ -1,12 +0,0 @@
ETH_PROVIDER=http://eth:8545
CELERY_BROKER_URL=redis://redis:6379
CELERY_RESULT_URL=redis://redis:6379
CIC_REGISTRY_ADDRESS=0xea6225212005e86a4490018ded4bf37f3e772161
TOKEN_SYMBOL=GFT
REDIS_HOST=redis
REDIS_PORT=6379
USER_USSD_HOST=cic-user-ussd-server
USER_USSD_PORT=9000
KEYSTORE_FILE_PATH=/root/keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c
OUT_DIR=out
NUMBER_OF_USERS=100

View File

@@ -8,9 +8,11 @@ RUN mkdir -vp /usr/local/etc/cic
COPY package.json \
package-lock.json \
.
RUN npm ci --production
RUN --mount=type=cache,mode=0755,target=/root/node_modules npm install
COPY requirements.txt .
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip pip install \
@@ -19,5 +21,4 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip pip install \
COPY . .
ENTRYPOINT [ ]

Some files were not shown because too many files have changed in this diff Show More