Compare commits

...

135 Commits

Author SHA1 Message Date
philip 004e30af31
Merge branch 'master' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/improve-cache 2022-01-04 18:59:11 +03:00
nolash 7618abcda3
Bump deps 2021-12-22 21:31:39 +00:00
nolash 94d8ddb164
Amend okota dependency conflict 2021-12-22 20:34:12 +00:00
nolash 7f958d4be8
Rehabiliate test for balance incoming check (already counted from PENDING state) 2021-12-22 20:23:02 +00:00
nolash 03b06ca8c1 Merge remote-tracking branch 'origin/master' into lash/improve-cache 2021-12-22 20:09:54 +00:00
nolash 14449f5c6d
Rehabilitate tests for cic-eth 2021-12-22 20:02:05 +00:00
nolash 15618fa061
Rehabilitate tests after url format change 2021-12-22 19:12:08 +00:00
nolash 3a52a78e93
Change poker method 2021-12-21 14:23:33 +00:00
nolash 6562d37a30
Add demurrage poker script 2021-12-21 14:19:26 +00:00
nolash c5efa56885
Loosen requirements restrictions, fix missing explicit nonce in global contract deploy last step 2021-12-21 14:18:09 +00:00
philip d6346bb87b
Merge "origin/master" into "lash/improve-cache" 2021-12-20 11:14:04 +03:00
philip 9050d331cd
Bumps lib versions 2021-12-20 11:12:39 +03:00
philip 99997df248
Bumps deps 2021-12-20 11:11:23 +03:00
philip d04a4bf5c6
Adds missing import: tempfile 2021-12-20 11:10:35 +03:00
philip 43c49dd527
Adds minor fixes:
- Added appropriate imports:
  - from okota.token_index.index import to_identifier
  - from cic_eth.error import SignerError
- cleaned up balance func.
2021-12-20 11:07:41 +03:00
philip 511557c242
Cleaned up docker-compose.yml:
- Added ETH_MIN_FEE_LIMIT.
- Removed "" from callback task definitions.
2021-12-20 11:03:23 +03:00
philip 887799962f
Couldn't get `python setup.py install` to work without it fussing:
```
 => => # Processing dependencies for cic-cache==0.3.1
 => => # Searching for eth-contract-registry~=0.7.1a2
 => => # Reading https://pypi.org/simple/eth-contract-registry/
 => => # Couldn't retrieve index page for 'eth-contract-registry'
 => => # Scanning index of all packages (this may take a while)
 => => # Reading https://pypi.org/simple/
```
2021-12-20 11:01:39 +03:00
philip 3acc3cf417
Bumps deps. 2021-12-20 10:58:21 +03:00
philip ceeb246ce2
Retires unused override_requirements.txt file. 2021-12-20 10:56:30 +03:00
philip 45499ec839
Refactor to match proof checked for by verify_proofs task. 2021-12-20 10:56:01 +03:00
philip 77bdee049c
Adds token decimals to demurrage token deployment. 2021-12-20 10:54:28 +03:00
nolash 0cf6489f49 Merge remote-tracking branch 'origin/master' into lash/improve-cache 2021-12-08 06:44:31 +01:00
nolash 31256b3650
Remove custom port in pip url in dockers 2021-12-07 21:54:04 +01:00
nolash 380550cb84
Add higher fee limit in base task 2021-12-07 21:45:07 +01:00
nolash a356585c6a
Remove port from pip 2021-12-06 17:45:44 +01:00
nolash 4809bc8c22
Bump confini 2021-12-06 14:21:26 +01:00
nolash 760f618943
WIP upgrade deps 2021-12-04 11:39:13 +01:00
nolash 39de1837c2
Upgrade deps to non-prerelease (temporarily removed transfer-auth) 2021-11-15 14:23:24 +01:00
nolash 97e45c87d7
WIP move to whole patch versions in deps 2021-11-15 14:07:54 +01:00
nolash 4658a5d8e5
Bump cic-cache version 2021-11-13 07:51:01 +01:00
nolash 995d4e0bd0
Add remaining database name prefix changes 2021-11-10 09:55:30 +01:00
nolash 140b72a72b
Use database prefix instead of name 2021-11-10 09:07:23 +01:00
nolash 21b0c4a48b
Change query parse order 2021-11-08 09:58:22 +01:00
nolash 0b66462c11
Update openapi spec, enable queries with no ranges 2021-11-04 09:42:35 +01:00
nolash f18f865231
WIP openapi spec for cic-cache-server 2021-11-04 07:59:38 +01:00
nolash ad1c241a85
Reorganize url path params in cic-cache-server 2021-11-04 06:06:34 +01:00
nolash 99b0fb5aed Merge branch 'lash/verify-cache' into lash/bloxberg-seeding 2021-11-04 04:26:50 +01:00
nolash 29423449b7 Merge remote-tracking branch 'origin/master' into lash/verify-cache 2021-11-04 04:23:47 +01:00
nolash 58e766aa58
Remove explicit config in db migration 2021-11-04 04:18:27 +01:00
nolash 2ebcd3e3de Merge remote-tracking branch 'origin/master' into lash/bloxberg-seeding 2021-11-02 18:49:49 +01:00
nolash c440b049cc
Add config dirs 2021-11-02 16:35:44 +01:00
nolash 09034af5bc
Bump cic-eth version 2021-11-02 16:03:29 +01:00
nolash dc80bae673
Upgrade cic-eth in migrations 2021-11-02 15:31:00 +01:00
nolash d88ae00b72
Add celery cli args with defaults from redis 2021-10-31 07:58:35 +01:00
nolash 7a366edb9d
WIP rehabilitate cic-eth-inspect 2021-10-30 19:09:17 +02:00
nolash 0b912b99b6
Add role listing to cic-eth tag cli tool 2021-10-30 13:19:31 +02:00
nolash cbd4aef004
Add action confirm on sweep script 2021-10-30 10:25:39 +02:00
nolash 6f7f91780b
Add script to sweep gas from signer accounts 2021-10-30 09:02:04 +02:00
nolash 83ecdaf023
Connect token filter to tracker 2021-10-29 16:35:11 +02:00
nolash e2ef9b43c8
Reactivate cic-eth-tasker dependency for bootstrap 2021-10-29 15:58:34 +02:00
nolash 6e58e4e4de
Remove nasty residue from bootstrap 2021-10-29 14:40:06 +02:00
nolash f46c9b0e7d Merge remote-tracking branch 'origin/master' into lash/bloxberg-seeding 2021-10-29 11:39:40 +02:00
nolash 6ca3fd55d7
Add gas cache oracle connection for erc20 2021-10-29 08:45:42 +02:00
nolash 258ed420b8 Merge branch 'lash/tmp-bloxberg-seeding' into lash/bloxberg-seeding 2021-10-29 07:35:08 +02:00
nolash 1c022e9853
Added changes to wrong branch 2021-10-29 07:33:38 +02:00
nolash d35e144723
Register gas cache only for registered tokens 2021-10-29 07:00:25 +02:00
nolash fb953d0318
Add gas cache backend, test, filter 2021-10-28 21:45:47 +02:00
nolash 858bbdb69a Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-28 14:36:45 +02:00
nolash 66e23e4e20
Test config cleanup 2021-10-28 14:11:11 +02:00
nolash 546256c86a
Better gas gifting amounts and thresholds estimation, fix broken cic-eth imports 2021-10-28 13:34:39 +02:00
nolash d9720bd0aa Merge remote-tracking branch 'origin/lash/local-dev-improve' into lash/bloxberg-seeding 2021-10-28 05:41:27 +02:00
nolash e9e9f66d97
Correct wrong change for docker registries 2021-10-28 05:39:44 +02:00
nolash 0d640fab57 Merge remote-tracking branch 'origin/lash/local-dev-improve' into lash/bloxberg-seeding 2021-10-28 05:29:07 +02:00
nolash 4ce85bc824
Remove faulty default registry in dockerfiles 2021-10-28 05:27:13 +02:00
nolash ce67f83457
Remove faulty default registry in docker compose 2021-10-28 05:24:11 +02:00
nolash 13f2e17931
Remove accidental 0 value override for syncer offset to trackers 2021-10-28 05:18:54 +02:00
nolash f236234682 Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-27 16:58:38 +02:00
nolash 1f37632f0f
WIP Replace env vars in data-seeding with well-known 2021-10-27 16:56:03 +02:00
nolash 03d7518f8c Merge branch 'lash/local-dev-improve' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/local-dev-improve 2021-10-27 11:52:31 +02:00
nolash 67152d0df1
Replace KEYSTORE_PATH with WALLET_KEY_FILE in data seeding 2021-10-27 11:51:20 +02:00
PhilipWafula 9168322941
Revert base image changes. 2021-10-27 12:41:35 +03:00
PhilipWafula 2fbd338e24
Adds correct base image. 2021-10-27 11:44:23 +03:00
PhilipWafula c7d7f2a64d
Remove force reset. 2021-10-27 11:44:08 +03:00
PhilipWafula 16153df2f0
Resolve creation of phone dir when it already exists. 2021-10-27 11:43:35 +03:00
nolash 4391fa3aff Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-25 21:01:27 +02:00
nolash 7ce68021bd Merge remote-tracking branch 'origin/master' into lash/verify-cache 2021-10-25 20:20:40 +02:00
nolash cd602dee49
Remove WIP docker compose file 2021-10-25 20:12:32 +02:00
nolash a548ba6fce
Chainlib upgrade to handle none receipts, rpc node debug output in bootstrap 2021-10-25 20:09:35 +02:00
nolash a6de7e9fe0 Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-20 20:02:19 +02:00
nolash e705a94873
Resolve notify/ussd dependency conflict 2021-10-20 10:07:19 +02:00
nolash 3923de0a81
Update pip args handling in notify 2021-10-19 23:01:55 +02:00
nolash 5c0250b5b9
Rehabilitate cic-cache db migration 2021-10-19 22:58:10 +02:00
nolash 3285d8dfe5
Implement asynchronous deploys in bootstrap 2021-10-19 22:08:17 +02:00
nolash 9d349f1579
Add debug level env var to bootstrap dev container 2021-10-19 19:54:59 +02:00
nolash 837a1770d1
Upgrade deps more chainlib in bootstrap 2021-10-19 10:10:39 +02:00
PhilipWafula 003febec9d
Bumps contract migration deps. 2021-10-19 10:38:21 +03:00
PhilipWafula f066a32ce8
Adds libffi-dev for local git-tea. 2021-10-19 10:38:08 +03:00
nolash ad493705ad
Upgrade deps 2021-10-18 17:16:28 +02:00
nolash b765c4ab88
More wrestling with chainlib-eth deps 2021-10-18 17:06:31 +02:00
nolash e4935d3b58 Merge branch 'lash/split-migration' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/split-migration 2021-10-18 16:49:58 +02:00
nolash f88f0e321b
Upgrade chainlib-eth dep 2021-10-18 16:48:14 +02:00
PhilipWafula 31fa721397
Add cic-notify container 2021-10-18 17:17:53 +03:00
PhilipWafula 16481da193
Merge remote-tracking branch 'origin/lash/split-migration' into lash/split-migration 2021-10-18 16:54:23 +03:00
PhilipWafula 97a48cd8c6
Improves ussd deps. 2021-10-18 16:53:38 +03:00
nolash 7732412341 Merge branch 'lash/split-migration' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/split-migration 2021-10-18 15:51:38 +02:00
nolash 649b124a61
Ugprade chainqueue dep 2021-10-18 15:50:45 +02:00
PhilipWafula 7601e3eeff
Corrects breakages in cic-ussd 2021-10-18 15:19:32 +03:00
PhilipWafula 60a9efc88b
Merge remote-tracking branch 'origin/lash/split-migration' into lash/split-migration 2021-10-18 15:18:33 +03:00
PhilipWafula 45011b58c4
Cleans up configs. 2021-10-18 15:11:31 +03:00
nolash f1a0b4ee7c Merge branch 'lash/split-migration' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/split-migration 2021-10-18 14:10:52 +02:00
nolash c57abb7ad5
Upgrade deps in cic-eth, allow for new chain spec format 2021-10-18 14:08:39 +02:00
PhilipWafula 930a99c974
Bumps cic-types version. 2021-10-18 06:52:49 +03:00
PhilipWafula b0935caab8
Fixes imports. 2021-10-18 06:52:28 +03:00
nolash bdd5f6fcec
Update readme in data seeding 2021-10-17 19:37:29 +02:00
nolash a293c2460e
Consolidate dir handling in data seeding scripts 2021-10-17 19:27:15 +02:00
nolash 0ee6400d7d
WIP rehabilitate ussd builds 2021-10-17 18:32:08 +02:00
nolash 677fb346fd
Add data seeding preparation step, rehabilitation of non-custodial seeding 2021-10-17 18:05:00 +02:00
nolash ea3c75e755
Rehabilitate traffic script 2021-10-17 14:30:42 +02:00
nolash 0b2f22c416
Rehabilitate cic-user-server 2021-10-16 20:54:41 +02:00
nolash 24385ea27d
Rehabilitate cic-cache 2021-10-16 14:03:05 +02:00
nolash 9a154a8046
WIP rehabilitate cic-cache 2021-10-16 08:23:32 +02:00
nolash d3576c8ec7
Add eth retrier to new docker compose file 2021-10-16 07:08:44 +02:00
nolash 79ee2bf4ff
Add eth tracker, dispatcher to new docker compose file 2021-10-16 07:04:19 +02:00
nolash 89ac70371a
Remove single function worker in test 2021-10-16 00:18:08 +02:00
nolash 5ea0318b0b
Fix default token symbol config setting for aux 2021-10-15 23:21:57 +02:00
nolash 5dfb96ec0c
Add new cic-signer app 2021-10-15 23:11:00 +02:00
nolash 4634ac41df Merge remote-tracking branch 'origin/master' into lash/split-migration 2021-10-15 22:19:01 +02:00
nolash 97f4fe8ca7
refactor docker-compose cic-eth-tasker, bootstrap (aka contract migration) 2021-10-15 22:16:45 +02:00
nolash b36529f7fa
WIP local docker registry adaptations 2021-10-15 20:27:03 +02:00
nolash a6675f2348
Add environment sourcing for cic-eth-tasker docker compose 2021-10-15 18:52:37 +02:00
nolash e3116d74d6
No export 2021-10-15 12:54:16 +02:00
nolash c0bbdc9bec
Add missing file 2021-10-15 08:43:04 +02:00
nolash 396bd4f300
update preliminary readme 2021-10-15 08:38:01 +02:00
nolash 58547b4067
Bump cic-eth-registry 2021-10-15 07:44:50 +02:00
nolash 9009815d78
Add trust address to contract migration config, get cic-eth default token from registry 2021-10-14 21:31:04 +02:00
nolash 2da19f5819
Add basic connectivity config directives 2021-10-14 17:40:53 +02:00
nolash 3948d5aa40
Add custodial initialization 2021-10-14 17:18:49 +02:00
nolash ed432abb23
WIP refactor custodial initialization 2021-10-14 14:37:48 +02:00
nolash f251b8b729
Remove dead code 2021-10-14 11:35:08 +02:00
nolash 36e791e08a
Split contract migration into three separate steps 2021-10-14 11:33:50 +02:00
nolash 71a7e3d3d5
Reinstate test config dir 2021-10-09 17:23:38 +02:00
nolash 335b7b30a4
Add okota dep 2021-10-09 16:40:28 +02:00
nolash 3b1f470ddf
Add empty config dir 2021-10-09 16:33:40 +02:00
nolash 4c9f20aa7f
Add explicit zero length tx lsit check for cic-cache verify 2021-10-08 11:26:09 +02:00
nolash 980191be4f
Add verify check for cache, use chainlib cli for cic-cache 2021-10-08 11:19:21 +02:00
74 changed files with 1057 additions and 394 deletions

View File

@ -1 +1 @@
include *requirements.txt cic_cache/data/config/* include *requirements.txt cic_cache/data/config/* cic_cache/db/migrations/default/* cic_cache/db/migrations/default/versions/*

View File

@ -1,4 +1,4 @@
[cic] [cic]
registry_address = registry_address =
trust_address = trust_address =
health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas health_modules =

View File

@ -3,7 +3,8 @@ engine =
driver = driver =
host = host =
port = port =
name = cic-cache #name = cic-cache
prefix =
user = user =
password = password =
debug = 0 debug = 0

View File

@ -9,21 +9,26 @@ from .list import (
tag_transaction, tag_transaction,
add_tag, add_tag,
) )
from cic_cache.db.models.base import SessionBase
logg = logging.getLogger() logg = logging.getLogger()
def dsn_from_config(config): def dsn_from_config(config, name):
scheme = config.get('DATABASE_ENGINE') scheme = config.get('DATABASE_ENGINE')
if config.get('DATABASE_DRIVER') != None: if config.get('DATABASE_DRIVER') != None:
scheme += '+{}'.format(config.get('DATABASE_DRIVER')) scheme += '+{}'.format(config.get('DATABASE_DRIVER'))
database_name = name
if config.get('DATABASE_PREFIX'):
database_name = '{}_{}'.format(config.get('DATABASE_PREFIX'), database_name)
dsn = '' dsn = ''
if config.get('DATABASE_ENGINE') == 'sqlite': if config.get('DATABASE_ENGINE') == 'sqlite':
SessionBase.poolable = False
dsn = '{}:///{}'.format( dsn = '{}:///{}'.format(
scheme, scheme,
config.get('DATABASE_NAME'), database_name,
) )
else: else:
@ -33,7 +38,7 @@ def dsn_from_config(config):
config.get('DATABASE_PASSWORD'), config.get('DATABASE_PASSWORD'),
config.get('DATABASE_HOST'), config.get('DATABASE_HOST'),
config.get('DATABASE_PORT'), config.get('DATABASE_PORT'),
config.get('DATABASE_NAME'), database_name,
) )
logg.debug('parsed dsn from config: {}'.format(dsn)) logg.debug('parsed dsn from config: {}'.format(dsn))
return dsn return dsn

View File

@ -5,7 +5,11 @@ import re
import base64 import base64
# external imports # external imports
from hexathon import add_0x from hexathon import (
add_0x,
strip_0x,
)
from chainlib.encode import TxHexNormalizer
# local imports # local imports
from cic_cache.cache import ( from cic_cache.cache import (
@ -16,27 +20,72 @@ from cic_cache.cache import (
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
#logg = logging.getLogger() #logg = logging.getLogger()
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?' re_transactions_all_bloom = r'/tx/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?' re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
re_transactions_all_data = r'/txa/(\d+)?/?(\d+)/?' re_transactions_all_data = r'/txa/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?'
re_transactions_account_data = r'/txa/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
re_default_limit = r'/defaultlimit/?'
DEFAULT_LIMIT = 100 DEFAULT_LIMIT = 100
tx_normalize = TxHexNormalizer()
def parse_query_account(r):
address = strip_0x(r[1])
#address = tx_normalize.wallet_address(address)
limit = DEFAULT_LIMIT
g = r.groups()
if len(g) > 3:
limit = int(r[4])
if limit == 0:
limit = DEFAULT_LIMIT
offset = 0
if len(g) > 4:
offset = int(r[6])
logg.debug('account query is address {} offset {} limit {}'.format(address, offset, limit))
return (address, offset, limit,)
# r is an re.Match
def parse_query_any(r):
limit = DEFAULT_LIMIT
offset = 0
block_offset = None
block_end = None
if r.lastindex != None:
if r.lastindex > 0:
limit = int(r[1])
if r.lastindex > 1:
offset = int(r[2])
if r.lastindex > 2:
block_offset = int(r[3])
if r.lastindex > 3:
block_end = int(r[4])
if block_end < block_offset:
raise ValueError('cart before the horse, dude')
logg.debug('data query is offset {} limit {} block_offset {} block_end {}'.format(offset, limit, block_offset, block_end))
return (offset, limit, block_offset, block_end,)
def process_default_limit(session, env):
r = re.match(re_default_limit, env.get('PATH_INFO'))
if not r:
return None
return ('application/json', str(DEFAULT_LIMIT).encode('utf-8'),)
def process_transactions_account_bloom(session, env): def process_transactions_account_bloom(session, env):
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO')) r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
if not r: if not r:
return None return None
logg.debug('match account bloom')
address = r[1] (address, offset, limit,) = parse_query_account(r)
if r[2] == None:
address = add_0x(address)
offset = 0
if r.lastindex > 2:
offset = r[4]
limit = DEFAULT_LIMIT
if r.lastindex > 4:
limit = r[6]
c = BloomCache(session) c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit) (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
@ -59,13 +108,9 @@ def process_transactions_all_bloom(session, env):
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO')) r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
if not r: if not r:
return None return None
logg.debug('match all bloom')
offset = DEFAULT_LIMIT (limit, offset, block_offset, block_end,) = parse_query_any(r)
if r.lastindex > 0:
offset = r[1]
limit = 0
if r.lastindex > 1:
limit = r[2]
c = BloomCache(session) c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit) (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
@ -88,17 +133,16 @@ def process_transactions_all_data(session, env):
r = re.match(re_transactions_all_data, env.get('PATH_INFO')) r = re.match(re_transactions_all_data, env.get('PATH_INFO'))
if not r: if not r:
return None return None
if env.get('HTTP_X_CIC_CACHE_MODE') != 'all': #if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
return None # return None
logg.debug('match all data')
logg.debug('got data request {}'.format(env)) logg.debug('got data request {}'.format(env))
block_offset = r[1]
block_end = r[2] (offset, limit, block_offset, block_end) = parse_query_any(r)
if int(r[2]) < int(r[1]):
raise ValueError('cart before the horse, dude')
c = DataCache(session) c = DataCache(session)
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(0, 0, block_offset, block_end, oldest=True) # oldest needs to be settable (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, limit, block_offset, block_end, oldest=True) # oldest needs to be settable
for r in tx_cache: for r in tx_cache:
r['date_block'] = r['date_block'].timestamp() r['date_block'] = r['date_block'].timestamp()
@ -113,3 +157,30 @@ def process_transactions_all_data(session, env):
j = json.dumps(o) j = json.dumps(o)
return ('application/json', j.encode('utf-8'),) return ('application/json', j.encode('utf-8'),)
def process_transactions_account_data(session, env):
r = re.match(re_transactions_account_data, env.get('PATH_INFO'))
if not r:
return None
logg.debug('match account data')
#if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
# return None
(address, offset, limit,) = parse_query_account(r)
c = DataCache(session)
(lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data(address, offset, limit)
for r in tx_cache:
r['date_block'] = r['date_block'].timestamp()
o = {
'low': lowest_block,
'high': highest_block,
'data': tx_cache,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)

View File

@ -12,21 +12,20 @@ import cic_cache.cli
from cic_cache.db import dsn_from_config from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase from cic_cache.db.models.base import SessionBase
from cic_cache.runnable.daemons.query import ( from cic_cache.runnable.daemons.query import (
process_default_limit,
process_transactions_account_bloom, process_transactions_account_bloom,
process_transactions_account_data,
process_transactions_all_bloom, process_transactions_all_bloom,
process_transactions_all_data, process_transactions_all_data,
) )
import cic_cache.cli
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')
# process args arg_flags = cic_cache.cli.argflag_std_read
arg_flags = cic_cache.cli.argflag_std_base local_arg_flags = cic_cache.cli.argflag_local_sync | cic_cache.cli.argflag_local_task
local_arg_flags = cic_cache.cli.argflag_local_task
argparser = cic_cache.cli.ArgumentParser(arg_flags) argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags) argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args() args = argparser.parse_args()
@ -35,7 +34,7 @@ args = argparser.parse_args()
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags) config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
# connect to database # connect to database
dsn = dsn_from_config(config) dsn = dsn_from_config(config, 'cic_cache')
SessionBase.connect(dsn, config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
@ -47,9 +46,11 @@ def application(env, start_response):
session = SessionBase.create_session() session = SessionBase.create_session()
for handler in [ for handler in [
process_transactions_account_data,
process_transactions_account_bloom,
process_transactions_all_data, process_transactions_all_data,
process_transactions_all_bloom, process_transactions_all_bloom,
process_transactions_account_bloom, process_default_limit,
]: ]:
r = None r = None
try: try:

View File

@ -3,6 +3,7 @@ import logging
import os import os
import sys import sys
import argparse import argparse
import tempfile
# third-party imports # third-party imports
import celery import celery
@ -28,7 +29,7 @@ args = argparser.parse_args()
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags) config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
# connect to database # connect to database
dsn = dsn_from_config(config) dsn = dsn_from_config(config, 'cic_cache')
SessionBase.connect(dsn) SessionBase.connect(dsn)
# set up celery # set up celery

View File

@ -50,7 +50,7 @@ args = argparser.parse_args()
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags) config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
# connect to database # connect to database
dsn = dsn_from_config(config) dsn = dsn_from_config(config, 'cic_cache')
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
# set up rpc # set up rpc

View File

@ -5,7 +5,7 @@ version = (
0, 0,
2, 2,
1, 1,
'alpha.2', 'alpha.3',
) )
version_object = semver.VersionInfo( version_object = semver.VersionInfo(

View File

@ -1,3 +0,0 @@
[celery]
broker_url = redis:///
result_url = redis:///

View File

@ -1,3 +0,0 @@
[cic]
registry_address =
trust_address =

View File

View File

@ -1,9 +0,0 @@
[database]
NAME=cic_cache
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0

View File

@ -1,3 +0,0 @@
[celery]
broker_url = redis://localhost:63379
result_url = redis://localhost:63379

View File

@ -1,3 +0,0 @@
[cic]
registry_address =
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C

View File

@ -1,9 +0,0 @@
[database]
NAME=cic_cache
USER=grassroots
PASSWORD=
HOST=localhost
PORT=63432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0

View File

@ -1,4 +0,0 @@
[syncer]
loop_interval = 1
offset = 0
no_history = 0

View File

@ -1,2 +0,0 @@
[bancor]
dir =

View File

@ -1,4 +1,3 @@
[cic] [cic]
registry_address = registry_address =
chain_spec =
trust_address = trust_address =

View File

@ -1,5 +1,5 @@
[database] [database]
NAME=cic-cache-test PREFIX=cic-cache-test
USER=postgres USER=postgres
PASSWORD= PASSWORD=
HOST=localhost HOST=localhost

View File

@ -1,5 +0,0 @@
[eth]
#ws_provider = ws://localhost:8546
#ttp_provider = http://localhost:8545
provider = http://localhost:8545
#chain_id =

View File

@ -1,4 +1,4 @@
openapi: "3.0.3" openapi: "3.0.2"
info: info:
title: Grassroots Economics CIC Cache title: Grassroots Economics CIC Cache
description: Cache of processed transaction data from Ethereum blockchain and worker queues description: Cache of processed transaction data from Ethereum blockchain and worker queues
@ -9,17 +9,34 @@ info:
email: will@grassecon.org email: will@grassecon.org
license: license:
name: GPLv3 name: GPLv3
version: 0.1.0 version: 0.2.0
paths: paths:
/tx/{offset}/{limit}: /defaultlimit:
description: Bloom filter for batch of latest transactions summary: The default limit value of result sets.
get:
tags:
- transactions
description:
Retrieve default limit
operationId: limit.default
responses:
200:
description: Limit query successful
content:
application/json:
schema:
$ref: "#/components/schemas/Limit"
/tx:
summary: Bloom filter for batch of latest transactions
description: Generate a bloom filter of the latest transactions in the cache. The number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get: get:
tags: tags:
- transactions - transactions
description: description:
Retrieve transactions Retrieve transactions
operationId: tx.get operationId: tx.get.latest
responses: responses:
200: 200:
description: Transaction query successful. description: Transaction query successful.
@ -29,27 +46,153 @@ paths:
$ref: "#/components/schemas/BlocksBloom" $ref: "#/components/schemas/BlocksBloom"
parameters: /tx/{limit}:
- name: offset summary: Bloom filter for batch of latest transactions
in: path description: Generate a bloom filter of the latest transactions in the cache. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
schema:
type: integer
format: int32
- name: limit
in: path
schema:
type: integer
format: int32
/tx/{address}/{offset}/{limit}:
description: Bloom filter for batch of latest transactions by account
get: get:
tags: tags:
- transactions - transactions
description: description:
Retrieve transactions Retrieve transactions
operationId: tx.get operationId: tx.get.latest.limit
responses:
200:
description: Transaction query successful. Results are ordered from newest to oldest.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
/tx/{limit}/{offset}:
summary: Bloom filter for batch of latest transactions
description: Generate a bloom filter of the latest transactions in the cache. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest.range
responses:
200:
description: Transaction query successful. Results are ordered from newest to oldest.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
/tx/{limit}/{offset}/{block_offset}:
summary: Bloom filter for batch of transactions since a particular block.
description: Generate a bloom filter of the latest transactions since a particular block in the cache. The block parameter is inclusive. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest.range.block.offset
responses:
200:
description: Transaction query successful. Results are ordered from oldest to newest.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
/tx/{limit}/{offset}/{block_offset}/{block_end}:
summary: Bloom filter for batch of transactions within a particular block range.
description: Generate a bloom filter of the latest transactions within a particular block range in the cache. The block parameters are inclusive. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest.range.block.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_end
in: path
required: true
schema:
type: integer
format: int32
/tx/{address}:
summary: Bloom filter for batch of latest transactions by account.
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.user
responses: responses:
200: 200:
description: Transaction query successful. description: Transaction query successful.
@ -58,6 +201,30 @@ paths:
schema: schema:
$ref: "#/components/schemas/BlocksBloom" $ref: "#/components/schemas/BlocksBloom"
parameters:
- name: address
in: path
required: true
schema:
type: string
/tx/{address}/{limit}:
summary: Bloom filter for batch of latest transactions by account.
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.user.limit
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters: parameters:
- name: address - name: address
@ -65,26 +232,317 @@ paths:
required: true required: true
schema: schema:
type: string type: string
- name: offset
in: path
schema:
type: integer
format: int32
- name: limit - name: limit
in: path in: path
required: true
schema: schema:
type: integer type: integer
format: int32 format: int32
/tx/{address}/{limit}/{offset}:
summary: Bloom filter for batch of latest transactions by account
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.user.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: address
in: path
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
/txa:
summary: Cached data for latest transactions.
description: Return data entries of the latest transactions in the cache. The number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
/txa/{limit}:
summary: Cached data for latest transactions.
description: Return data entries of the latest transactions in the cache. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.limit
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
/txa/{limit}/{offset}:
summary: Cached data for latest transactions.
description: Return data entries of the latest transactions in the cache. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
/txa/{limit}/{offset}/{block_offset}:
summary: Cached data for transactions since a particular block.
description: Return cached data entries of transactions since a particular block. The block parameter is inclusive. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.range.block.offset
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
/txa/{limit}/{offset}/{block_offset}/{block_end}:
summary: Cached data for transactions within a particular block range.
description: Return cached data entries of transactions within a particular block range in the cache. The block parameters are inclusive. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.range.block.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_end
in: path
required: true
schema:
type: integer
format: int32
/txa/{address}:
summary: Cached data for batch of latest transactions by account.
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.user
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: address
in: path
required: true
schema:
type: string
/txa/{address}/{limit}:
summary: Cached data for batch of latest transactions by account.
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.user.limit
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: address
in: path
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
/txa/{address}/{limit}/{offset}:
summary: Cached data for batch of latest transactions by account.
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.user.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: address
in: path
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
components: components:
schemas: schemas:
Limit:
type: integer
format: int32
BlocksBloom: BlocksBloom:
type: object type: object
properties: properties:
low: low:
type: int type: integer
format: int32 format: int32
description: The lowest block number included in the filter description: The lowest block number included in the filter
high:
type: integer
format: int32
description: The highest block number included in the filter
block_filter: block_filter:
type: string type: string
format: byte format: byte
@ -97,6 +555,89 @@ components:
type: string type: string
description: Hashing algorithm (currently only using sha256) description: Hashing algorithm (currently only using sha256)
filter_rounds: filter_rounds:
type: int type: integer
format: int32 format: int32
description: Number of hash rounds used to create the filter description: Number of hash rounds used to create the filter
TransactionList:
type: object
properties:
low:
type: integer
format: int32
description: The lowest block number included in the result set
high:
type: integer
format: int32
description: The highest block number included in the filter
data:
type: array
description: Cached transaction data
items:
$ref: "#/components/schemas/Transaction"
Transaction:
type: object
properties:
block_number:
type: integer
format: int64
description: Block number transaction was included in.
tx_hash:
type: string
description: Transaction hash, in hex.
date_block:
type: integer
format: int32
description: Block timestamp.
sender:
type: string
description: Spender address, in hex.
recipient:
type: string
description: Beneficiary address, in hex.
from_value:
type: integer
format: int64
description: Value deducted from spender's balance.
to_value:
type: integer
format: int64
description: Value added to beneficiary's balance.
source_token:
type: string
description: Network address of token in which `from_value` is denominated.
destination_token:
type: string
description: Network address of token in which `to_value` is denominated.
success:
type: boolean
description: Network consensus state on whether the transaction was successful or not.
tx_type:
type: string
enum:
- erc20.faucet
- faucet.give_to
examples:
data_last:
summary: Get the latest cached transactions, using the server's default limit.
value: "/txa"
data_limit:
summary: Get the last 42 cached transactions.
value: "/txa/42"
data_range:
summary: Get the next 42 cached transactions, starting from the 13th (zero-indexed).
value: "/txa/42/13"
data_range_block_offset:
summary: Get the next 42 cached transactions, starting from block 1337 (inclusive).
value: "/txa/42/0/1337"
data_range_block_offset:
summary: Get the next 42 cached transactions within blocks 1337 and 1453 (inclusive).
value: "/txa/42/0/1337/1453"
data_range_block_range:
summary: Get the next 42 cached transactions after the 13th, within blocks 1337 and 1453 (inclusive).
value: "/txa/42/13/1337/1453"

View File

@ -4,9 +4,9 @@ FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
COPY requirements.txt . COPY requirements.txt .
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net" ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net
ARG EXTRA_PIP_ARGS="" ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL="https://pypi.org/simple" ARG PIP_INDEX_URL=https://pypi.org/simple
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \ pip install --index-url $PIP_INDEX_URL \
@ -14,14 +14,9 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \ --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt -r requirements.txt
COPY . .
RUN python setup.py install COPY . .
RUN pip install . --extra-index-url $EXTRA_PIP_INDEX_URL
# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-cache/
# for db migrations # for db migrations
COPY ./aux/wait-for-it/wait-for-it.sh ./ COPY ./aux/wait-for-it/wait-for-it.sh ./

View File

@ -2,5 +2,5 @@
set -e set -e
>&2 echo executing database migration >&2 echo executing database migration
python scripts/migrate.py --migrations-dir /usr/local/share/cic-cache/alembic -vv python scripts/migrate_cic_cache.py --migrations-dir /usr/local/share/cic-cache/alembic -vv
set +e set +e

View File

@ -1,14 +1,15 @@
alembic==1.4.2 alembic==1.4.2
confini>=0.3.6rc4,<0.5.0 confini~=0.5.3
uwsgi==2.0.19.1 uwsgi==2.0.19.1
moolb~=0.1.1b2 moolb~=0.2.0
cic-eth-registry~=0.6.1a1 cic-eth-registry~=0.6.6
SQLAlchemy==1.3.20 SQLAlchemy==1.3.20
semver==2.13.0 semver==2.13.0
psycopg2==2.8.6 psycopg2==2.8.6
celery==4.4.7 celery==4.4.7
redis==3.5.3 redis==3.5.3
chainsyncer[sql]>=0.0.6a3,<0.1.0 chainsyncer[sql]~=0.0.7
erc20-faucet>=0.3.2a2, <0.4.0 erc20-faucet~=0.3.2
chainlib-eth>=0.0.9a14,<0.1.0 chainlib-eth~=0.0.15
eth-address-index>=0.2.3a4,<0.3.0 eth-address-index~=0.2.4
okota~=0.2.5

View File

@ -1,54 +1,55 @@
#!/usr/bin/python #!/usr/bin/python3
# standard imports
import os import os
import argparse import argparse
import logging import logging
import re import re
# external imports
import alembic import alembic
from alembic.config import Config as AlembicConfig from alembic.config import Config as AlembicConfig
import confini import confini
# local imports
from cic_cache.db import dsn_from_config from cic_cache.db import dsn_from_config
import cic_cache.cli
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
# BUG: the dbdir doesn't work after script install # BUG: the dbdir doesn't work after script install
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) rootdir = os.path.dirname(os.path.dirname(os.path.realpath(cic_cache.__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db') dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations') default_migrations_dir = os.path.join(dbdir, 'migrations')
configdir = os.path.join(rootdir, 'cic_cache', 'data', 'config') configdir = os.path.join(rootdir, 'cic_cache', 'data', 'config')
#config_dir = os.path.join('/usr/local/etc/cic-cache') #config_dir = os.path.join('/usr/local/etc/cic-cache')
argparser = argparse.ArgumentParser() arg_flags = cic_cache.cli.argflag_std_base
argparser.add_argument('-c', type=str, help='config file') local_arg_flags = cic_cache.cli.argflag_local_sync
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory') argparser.process_local_flags(local_arg_flags)
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading') argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
argparser.add_argument('-f', action='store_true', help='force action') argparser.add_argument('-f', '--force', action='store_true', help='force action')
argparser.add_argument('-v', action='store_true', help='be verbose') argparser.add_argument('--migrations-dir', dest='migrations_dir', default=default_migrations_dir, type=str, help='migrations directory')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args() args = argparser.parse_args()
if args.vv: extra_args = {
logging.getLogger().setLevel(logging.DEBUG) 'reset': None,
elif args.v: 'force': None,
logging.getLogger().setLevel(logging.INFO) 'migrations_dir': None,
}
# process config
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)
config = confini.Config(configdir, args.env_prefix) migrations_dir = os.path.join(config.get('_MIGRATIONS_DIR'), config.get('DATABASE_ENGINE', 'default'))
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))
migrations_dir = os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrations_dir): if not os.path.isdir(migrations_dir):
logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE'))) logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
migrations_dir = os.path.join(args.migrations_dir, 'default') migrations_dir = os.path.join(args.migrations_dir, 'default')
# connect to database # connect to database
dsn = dsn_from_config(config) dsn = dsn_from_config(config, 'cic_cache')
logg.info('using migrations dir {}'.format(migrations_dir)) logg.info('using migrations dir {}'.format(migrations_dir))

View File

@ -1,6 +1,7 @@
[metadata] [metadata]
name = cic-cache name = cic-cache
description = CIC Cache API and server description = CIC Cache API and server
version = 0.3.0a2
author = Louis Holbrook author = Louis Holbrook
author_email = dev@holbrook.no author_email = dev@holbrook.no
url = https://gitlab.com/grassrootseconomics/cic-eth url = https://gitlab.com/grassrootseconomics/cic-eth
@ -34,7 +35,7 @@ packages =
cic_cache.runnable.daemons cic_cache.runnable.daemons
cic_cache.runnable.daemons.filters cic_cache.runnable.daemons.filters
scripts = scripts =
./scripts/migrate.py ./scripts/migrate_cic_cache.py
[options.entry_points] [options.entry_points]
console_scripts = console_scripts =

View File

@ -1,38 +1,39 @@
from setuptools import setup from setuptools import setup
import configparser # import configparser
import os import os
import time
from cic_cache.version import ( # import time
version_object,
version_string
)
class PleaseCommitFirstError(Exception): # from cic_cache.version import (
pass # version_object,
# version_string
def git_hash(): # )
import subprocess #
git_diff = subprocess.run(['git', 'diff'], capture_output=True) # class PleaseCommitFirstError(Exception):
if len(git_diff.stdout) > 0: # pass
raise PleaseCommitFirstError() #
git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True) # def git_hash():
git_hash_brief = git_hash.stdout.decode('utf-8')[:8] # import subprocess
return git_hash_brief # git_diff = subprocess.run(['git', 'diff'], capture_output=True)
# if len(git_diff.stdout) > 0:
version_string = str(version_object) # raise PleaseCommitFirstError()
# git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
try: # git_hash_brief = git_hash.stdout.decode('utf-8')[:8]
version_git = git_hash() # return git_hash_brief
version_string += '+build.{}'.format(version_git) #
except FileNotFoundError: # version_string = str(version_object)
time_string_pair = str(time.time()).split('.') #
version_string += '+build.{}{:<09d}'.format( # try:
time_string_pair[0], # version_git = git_hash()
int(time_string_pair[1]), # version_string += '+build.{}'.format(version_git)
) # except FileNotFoundError:
print('final version string will be {}'.format(version_string)) # time_string_pair = str(time.time()).split('.')
# version_string += '+build.{}{:<09d}'.format(
# time_string_pair[0],
# int(time_string_pair[1]),
# )
# print('final version string will be {}'.format(version_string))
requirements = [] requirements = []
f = open('requirements.txt', 'r') f = open('requirements.txt', 'r')
@ -52,9 +53,8 @@ while True:
test_requirements.append(l.rstrip()) test_requirements.append(l.rstrip())
f.close() f.close()
setup( setup(
version=version_string, # version=version_string,
install_requires=requirements, install_requires=requirements,
tests_require=test_requirements, tests_require=test_requirements,
) )

View File

@ -7,4 +7,4 @@ pytest-celery==0.0.0a1
eth_tester==0.5.0b3 eth_tester==0.5.0b3
py-evm==0.3.0a20 py-evm==0.3.0a20
sarafu-faucet~=0.0.7a1 sarafu-faucet~=0.0.7a1
erc20-transfer-authorization>=0.3.5a1,<0.4.0 erc20-transfer-authorization~=0.3.6

View File

@ -6,6 +6,7 @@ import datetime
# external imports # external imports
import pytest import pytest
import moolb import moolb
from chainlib.encode import TxHexNormalizer
# local imports # local imports
from cic_cache import db from cic_cache import db
@ -42,6 +43,8 @@ def txs(
list_tokens, list_tokens,
): ):
tx_normalize = TxHexNormalizer()
session = init_database session = init_database
tx_number = 13 tx_number = 13
@ -54,10 +57,10 @@ def txs(
tx_hash_first, tx_hash_first,
list_defaults['block'], list_defaults['block'],
tx_number, tx_number,
list_actors['alice'], tx_normalize.wallet_address(list_actors['alice']),
list_actors['bob'], tx_normalize.wallet_address(list_actors['bob']),
list_tokens['foo'], tx_normalize.executable_address(list_tokens['foo']),
list_tokens['foo'], tx_normalize.executable_address(list_tokens['foo']),
1024, 1024,
2048, 2048,
True, True,
@ -74,10 +77,10 @@ def txs(
tx_hash_second, tx_hash_second,
list_defaults['block']-1, list_defaults['block']-1,
tx_number, tx_number,
list_actors['diane'], tx_normalize.wallet_address(list_actors['diane']),
list_actors['alice'], tx_normalize.wallet_address(list_actors['alice']),
list_tokens['foo'], tx_normalize.executable_address(list_tokens['foo']),
list_tokens['foo'], tx_normalize.wallet_address(list_tokens['foo']),
1024, 1024,
2048, 2048,
False, False,
@ -103,6 +106,8 @@ def more_txs(
session = init_database session = init_database
tx_normalize = TxHexNormalizer()
tx_number = 666 tx_number = 666
tx_hash = '0x' + os.urandom(32).hex() tx_hash = '0x' + os.urandom(32).hex()
tx_signed = '0x' + os.urandom(128).hex() tx_signed = '0x' + os.urandom(128).hex()
@ -115,10 +120,10 @@ def more_txs(
tx_hash, tx_hash,
list_defaults['block']+2, list_defaults['block']+2,
tx_number, tx_number,
list_actors['alice'], tx_normalize.wallet_address(list_actors['alice']),
list_actors['diane'], tx_normalize.wallet_address(list_actors['diane']),
list_tokens['bar'], tx_normalize.executable_address(list_tokens['bar']),
list_tokens['bar'], tx_normalize.executable_address(list_tokens['bar']),
2048, 2048,
4096, 4096,
False, False,

View File

@ -14,7 +14,8 @@ logg = logging.getLogger(__file__)
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def load_config(): def load_config():
config_dir = os.path.join(root_dir, 'config/test') config_dir = os.path.join(root_dir, 'config/test')
conf = confini.Config(config_dir, 'CICTEST') schema_config_dir = os.path.join(root_dir, 'cic_cache', 'data', 'config')
conf = confini.Config(schema_config_dir, 'CICTEST', override_dirs=config_dir)
conf.process() conf.process()
logg.debug('config {}'.format(conf)) logg.debug('config {}'.format(conf))
return conf return conf

View File

@ -24,11 +24,15 @@ def database_engine(
if load_config.get('DATABASE_ENGINE') == 'sqlite': if load_config.get('DATABASE_ENGINE') == 'sqlite':
SessionBase.transactional = False SessionBase.transactional = False
SessionBase.poolable = False SessionBase.poolable = False
name = 'cic_cache'
database_name = name
if load_config.get('DATABASE_PREFIX'):
database_name = '{}_{}'.format(load_config.get('DATABASE_PREFIX'), database_name)
try: try:
os.unlink(load_config.get('DATABASE_NAME')) os.unlink(database_name)
except FileNotFoundError: except FileNotFoundError:
pass pass
dsn = dsn_from_config(load_config) dsn = dsn_from_config(load_config, name)
SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
return dsn return dsn

View File

@ -14,7 +14,7 @@ def test_api_all_data(
): ):
env = { env = {
'PATH_INFO': '/txa/410000/420000', 'PATH_INFO': '/txa/100/0/410000/420000',
'HTTP_X_CIC_CACHE_MODE': 'all', 'HTTP_X_CIC_CACHE_MODE': 'all',
} }
j = process_transactions_all_data(init_database, env) j = process_transactions_all_data(init_database, env)
@ -23,7 +23,7 @@ def test_api_all_data(
assert len(o['data']) == 2 assert len(o['data']) == 2
env = { env = {
'PATH_INFO': '/txa/420000/410000', 'PATH_INFO': '/txa/100/0/420000/410000',
'HTTP_X_CIC_CACHE_MODE': 'all', 'HTTP_X_CIC_CACHE_MODE': 'all',
} }

View File

@ -6,6 +6,7 @@ import json
# external imports # external imports
import pytest import pytest
from chainlib.encode import TxHexNormalizer
# local imports # local imports
from cic_cache import db from cic_cache import db
@ -62,6 +63,8 @@ def test_cache_ranges(
session = init_database session = init_database
tx_normalize = TxHexNormalizer()
oldest = list_defaults['block'] - 1 oldest = list_defaults['block'] - 1
mid = list_defaults['block'] mid = list_defaults['block']
newest = list_defaults['block'] + 2 newest = list_defaults['block'] + 2
@ -100,32 +103,39 @@ def test_cache_ranges(
assert b[1] == mid assert b[1] == mid
# now check when supplying account # now check when supplying account
b = c.load_transactions_account(list_actors['alice'], 0, 100) account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account(account, 0, 100)
assert b[0] == oldest assert b[0] == oldest
assert b[1] == newest assert b[1] == newest
b = c.load_transactions_account(list_actors['bob'], 0, 100) account = tx_normalize.wallet_address(list_actors['bob'])
b = c.load_transactions_account(account, 0, 100)
assert b[0] == mid assert b[0] == mid
assert b[1] == mid assert b[1] == mid
b = c.load_transactions_account(list_actors['diane'], 0, 100) account = tx_normalize.wallet_address(list_actors['diane'])
b = c.load_transactions_account(account, 0, 100)
assert b[0] == oldest assert b[0] == oldest
assert b[1] == newest assert b[1] == newest
# add block filter to the mix # add block filter to the mix
b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block']) account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == newest assert b[1] == newest
b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block']) account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == newest assert b[1] == newest
b = c.load_transactions_account(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block']) account = tx_normalize.wallet_address(list_actors['bob'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == mid assert b[1] == mid
b = c.load_transactions_account(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block']) account = tx_normalize.wallet_address(list_actors['diane'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == oldest assert b[0] == oldest
assert b[1] == oldest assert b[1] == oldest
@ -140,6 +150,8 @@ def test_cache_ranges_data(
session = init_database session = init_database
tx_normalize = TxHexNormalizer()
oldest = list_defaults['block'] - 1 oldest = list_defaults['block'] - 1
mid = list_defaults['block'] mid = list_defaults['block']
newest = list_defaults['block'] + 2 newest = list_defaults['block'] + 2
@ -203,7 +215,8 @@ def test_cache_ranges_data(
assert b[2][1]['tx_hash'] == more_txs[1] assert b[2][1]['tx_hash'] == more_txs[1]
# now check when supplying account # now check when supplying account
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100) account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account_with_data(account, 0, 100)
assert b[0] == oldest assert b[0] == oldest
assert b[1] == newest assert b[1] == newest
assert len(b[2]) == 3 assert len(b[2]) == 3
@ -211,13 +224,15 @@ def test_cache_ranges_data(
assert b[2][1]['tx_hash'] == more_txs[1] assert b[2][1]['tx_hash'] == more_txs[1]
assert b[2][2]['tx_hash'] == more_txs[2] assert b[2][2]['tx_hash'] == more_txs[2]
b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100) account = tx_normalize.wallet_address(list_actors['bob'])
b = c.load_transactions_account_with_data(account, 0, 100)
assert b[0] == mid assert b[0] == mid
assert b[1] == mid assert b[1] == mid
assert len(b[2]) == 1 assert len(b[2]) == 1
assert b[2][0]['tx_hash'] == more_txs[1] assert b[2][0]['tx_hash'] == more_txs[1]
b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100) account = tx_normalize.wallet_address(list_actors['diane'])
b = c.load_transactions_account_with_data(account, 0, 100)
assert b[0] == oldest assert b[0] == oldest
assert b[1] == newest assert b[1] == newest
assert len(b[2]) == 2 assert len(b[2]) == 2
@ -225,27 +240,31 @@ def test_cache_ranges_data(
assert b[2][1]['tx_hash'] == more_txs[2] assert b[2][1]['tx_hash'] == more_txs[2]
# add block filter to the mix # add block filter to the mix
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block']) account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == newest assert b[1] == newest
assert len(b[2]) == 2 assert len(b[2]) == 2
assert b[2][0]['tx_hash'] == more_txs[0] assert b[2][0]['tx_hash'] == more_txs[0]
assert b[2][1]['tx_hash'] == more_txs[1] assert b[2][1]['tx_hash'] == more_txs[1]
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block']) account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == newest assert b[1] == newest
assert len(b[2]) == 2 assert len(b[2]) == 2
assert b[2][0]['tx_hash'] == more_txs[0] assert b[2][0]['tx_hash'] == more_txs[0]
assert b[2][1]['tx_hash'] == more_txs[1] assert b[2][1]['tx_hash'] == more_txs[1]
b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block']) account = tx_normalize.wallet_address(list_actors['bob'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == mid assert b[1] == mid
assert len(b[2]) == 1 assert len(b[2]) == 1
assert b[2][0]['tx_hash'] == more_txs[1] assert b[2][0]['tx_hash'] == more_txs[1]
b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block']) account = tx_normalize.wallet_address(list_actors['diane'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == oldest assert b[0] == oldest
assert b[1] == oldest assert b[1] == oldest
assert len(b[2]) == 1 assert len(b[2]) == 1

View File

@ -82,7 +82,7 @@ def test_query_regex(
[ [
('alice', None, None, [(420000, 13), (419999, 42)]), ('alice', None, None, [(420000, 13), (419999, 42)]),
('alice', None, 1, [(420000, 13)]), ('alice', None, 1, [(420000, 13)]),
('alice', 1, None, [(419999, 42)]), # 420000 == list_defaults['block'] ('alice', 1, 1, [(419999, 42)]), # 420000 == list_defaults['block']
('alice', 2, None, []), # 420000 == list_defaults['block'] ('alice', 2, None, []), # 420000 == list_defaults['block']
], ],
) )
@ -107,10 +107,11 @@ def test_query_process_txs_account(
path_info = '/tx/user/0x' + strip_0x(actor) path_info = '/tx/user/0x' + strip_0x(actor)
if query_offset != None: if query_offset != None:
path_info += '/' + str(query_offset) path_info += '/' + str(query_offset)
if query_limit != None: if query_limit == None:
if query_offset == None: query_limit = 100
path_info += '/0' path_info += '/' + str(query_limit)
path_info += '/' + str(query_limit) if query_offset == None:
path_info += '/0'
env = { env = {
'PATH_INFO': path_info, 'PATH_INFO': path_info,
} }
@ -192,7 +193,7 @@ def test_query_process_txs_bloom(
@pytest.mark.parametrize( @pytest.mark.parametrize(
'query_block_start, query_block_end, query_match_count', 'query_block_start, query_block_end, query_match_count',
[ [
(None, 42, 0), (1, 42, 0),
(420000, 420001, 1), (420000, 420001, 1),
(419999, 419999, 1), # matches are inclusive (419999, 419999, 1), # matches are inclusive
(419999, 420000, 2), (419999, 420000, 2),
@ -211,7 +212,7 @@ def test_query_process_txs_data(
query_match_count, query_match_count,
): ):
path_info = '/txa' path_info = '/txa/100/0'
if query_block_start != None: if query_block_start != None:
path_info += '/' + str(query_block_start) path_info += '/' + str(query_block_start)
if query_block_end != None: if query_block_end != None:
@ -227,4 +228,5 @@ def test_query_process_txs_data(
assert r != None assert r != None
o = json.loads(r[1]) o = json.loads(r[1])
logg.debug('oo {}'.format(o))
assert len(o['data']) == query_match_count assert len(o['data']) == query_match_count

View File

@ -1,5 +1,5 @@
celery==4.4.7 celery==4.4.7
erc20-demurrage-token~=0.0.5a3 erc20-demurrage-token~=0.0.6
cic-eth-registry~=0.6.1a6 cic-eth-registry~=0.6.3
chainlib~=0.0.9rc1 chainlib~=0.0.14
cic_eth~=0.12.4a11 cic_eth~=0.12.6

View File

@ -1,6 +1,6 @@
[metadata] [metadata]
name = cic-eth-aux-erc20-demurrage-token name = cic-eth-aux-erc20-demurrage-token
version = 0.0.2a7 version = 0.0.3
description = cic-eth tasks supporting erc20 demurrage token description = cic-eth tasks supporting erc20 demurrage token
author = Louis Holbrook author = Louis Holbrook
author_email = dev@holbrook.no author_email = dev@holbrook.no

View File

@ -1,5 +1,4 @@
SQLAlchemy==1.3.20 SQLAlchemy==1.3.20
cic-eth-registry>=0.6.1a6,<0.7.0 hexathon~=0.1.0
hexathon~=0.0.1a8 chainqueue~=0.0.6a4
chainqueue>=0.0.4a6,<0.1.0 eth-erc20~=0.1.5
eth-erc20>=0.1.2a2,<0.2.0

View File

@ -63,22 +63,32 @@ class Config(BaseConfig):
config.get('REDIS_HOST'), config.get('REDIS_HOST'),
config.get('REDIS_PORT'), config.get('REDIS_PORT'),
) )
db = getattr(args, 'redis_db', None)
if db != None:
db = str(db)
redis_url = ( redis_url = (
'redis', 'redis',
hostport, hostport,
getattr(args, 'redis_db', None), db,
) )
celery_config_url = urllib.parse.urlsplit(config.get('CELERY_BROKER_URL')) celery_config_url = urllib.parse.urlsplit(config.get('CELERY_BROKER_URL'))
hostport = urlhostmerge( hostport = urlhostmerge(
celery_config_url[1], celery_config_url[1],
getattr(args, 'celery_host', None), getattr(args, 'celery_host', None),
getattr(args, 'celery_port', None), getattr(args, 'celery_port', None),
) )
db = getattr(args, 'redis_db', None)
if db != None:
db = str(db)
celery_arg_url = ( celery_arg_url = (
getattr(args, 'celery_scheme', None), getattr(args, 'celery_scheme', None),
hostport, hostport,
getattr(args, 'celery_db', None), db,
) )
celery_url = urlmerge(redis_url, celery_config_url, celery_arg_url) celery_url = urlmerge(redis_url, celery_config_url, celery_arg_url)
celery_url_string = urllib.parse.urlunsplit(celery_url) celery_url_string = urllib.parse.urlunsplit(celery_url)
local_celery_args_override['CELERY_BROKER_URL'] = celery_url_string local_celery_args_override['CELERY_BROKER_URL'] = celery_url_string

View File

@ -22,7 +22,7 @@ from hexathon import (
from chainqueue.error import NotLocalTxError from chainqueue.error import NotLocalTxError
from eth_erc20 import ERC20 from eth_erc20 import ERC20
from chainqueue.sql.tx import cache_tx_dict from chainqueue.sql.tx import cache_tx_dict
from okota.token_index import to_identifier from okota.token_index.index import to_identifier
# local imports # local imports
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
@ -46,13 +46,14 @@ from cic_eth.task import (
from cic_eth.eth.nonce import CustodialTaskNonceOracle from cic_eth.eth.nonce import CustodialTaskNonceOracle
from cic_eth.encode import tx_normalize from cic_eth.encode import tx_normalize
from cic_eth.eth.trust import verify_proofs from cic_eth.eth.trust import verify_proofs
from cic_eth.error import SignerError
celery_app = celery.current_app celery_app = celery.current_app
logg = logging.getLogger() logg = logging.getLogger()
@celery_app.task(base=CriticalWeb3Task) @celery_app.task(bind=True, base=CriticalWeb3Task)
def balance(tokens, holder_address, chain_spec_dict): def balance(self, tokens, holder_address, chain_spec_dict):
"""Return token balances for a list of tokens for given address """Return token balances for a list of tokens for given address
:param tokens: Token addresses :param tokens: Token addresses
@ -71,8 +72,9 @@ def balance(tokens, holder_address, chain_spec_dict):
for t in tokens: for t in tokens:
address = t['address'] address = t['address']
logg.debug('address {} {}'.format(address, holder_address)) logg.debug('address {} {}'.format(address, holder_address))
gas_oracle = self.create_gas_oracle(rpc, min_price=self.min_fee_price)
token = ERC20Token(chain_spec, rpc, add_0x(address)) token = ERC20Token(chain_spec, rpc, add_0x(address))
c = ERC20(chain_spec) c = ERC20(chain_spec, gas_oracle=gas_oracle)
o = c.balance_of(address, holder_address, sender_address=caller_address) o = c.balance_of(address, holder_address, sender_address=caller_address)
r = rpc.do(o) r = rpc.do(o)
t['balance_network'] = c.parse_balance(r) t['balance_network'] = c.parse_balance(r)

View File

@ -92,7 +92,7 @@ def apply_gas_value_cache_local(address, method, value, tx_hash, session=None):
if o == None: if o == None:
o = GasCache(address, method, value, tx_hash) o = GasCache(address, method, value, tx_hash)
elif tx.gas_used > o.value: elif value > o.value:
o.value = value o.value = value
o.tx_hash = strip_0x(tx_hash) o.tx_hash = strip_0x(tx_hash)

View File

@ -25,12 +25,14 @@ logg = logging.getLogger()
celery_app = celery.current_app celery_app = celery.current_app
class BaseTask(celery.Task): class BaseTask(celery.Task):
session_func = SessionBase.create_session session_func = SessionBase.create_session
call_address = ZERO_ADDRESS call_address = ZERO_ADDRESS
trusted_addresses = [] trusted_addresses = []
min_fee_price = 1 min_fee_price = 1
min_fee_limit = 30000
default_token_address = None default_token_address = None
default_token_symbol = None default_token_symbol = None
default_token_name = None default_token_name = None
@ -42,7 +44,7 @@ class BaseTask(celery.Task):
if address == None: if address == None:
return RPCGasOracle( return RPCGasOracle(
conn, conn,
code_callback=kwargs.get('code_callback'), code_callback=kwargs.get('code_callback', self.get_min_fee_limit),
min_price=self.min_fee_price, min_price=self.min_fee_price,
id_generator=kwargs.get('id_generator'), id_generator=kwargs.get('id_generator'),
) )
@ -56,6 +58,10 @@ class BaseTask(celery.Task):
) )
def get_min_fee_limit(self, code):
return self.min_fee_limit
def create_session(self): def create_session(self):
return BaseTask.session_func() return BaseTask.session_func()

View File

@ -11,13 +11,6 @@ ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net
ARG EXTRA_PIP_ARGS="" ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple ARG PIP_INDEX_URL=https://pypi.org/simple
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a7
COPY *requirements.txt ./ COPY *requirements.txt ./
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \ pip install --index-url $PIP_INDEX_URL \
@ -25,7 +18,7 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \ --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt \ -r requirements.txt \
-r services_requirements.txt \ -r services_requirements.txt \
-r admin_requirements.txt -r admin_requirements.txt
COPY . . COPY . .
RUN python setup.py install RUN python setup.py install
@ -40,8 +33,6 @@ RUN chmod 755 *.sh
# # they can all be overridden by environment variables # # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package) # # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/ #COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
# TODO this kind of code sharing across projects should be discouraged...can we make util a library? # TODO this kind of code sharing across projects should be discouraged...can we make util a library?
#COPY util/liveness/health.sh /usr/local/bin/health.sh #COPY util/liveness/health.sh /usr/local/bin/health.sh
@ -66,9 +57,8 @@ ENTRYPOINT []
## # they can all be overridden by environment variables ## # they can all be overridden by environment variables
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package) ## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/ #COPY config/ /usr/local/etc/cic-eth/
#COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/ COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/ #COPY scripts/ scripts/
#COPY scripts/ scripts/
# #
## TODO this kind of code sharing across projects should be discouraged...can we make util a library? ## TODO this kind of code sharing across projects should be discouraged...can we make util a library?
##COPY util/liveness/health.sh /usr/local/bin/health.sh ##COPY util/liveness/health.sh /usr/local/bin/health.sh

View File

@ -1,4 +1,7 @@
celery==4.4.7 celery==4.4.7
chainlib-eth>=0.0.10a20,<0.1.0
semver==2.13.0 semver==2.13.0
urlybird~=0.0.1a2 chainlib-eth~=0.0.15
urlybird~=0.0.1
cic-eth-registry~=0.6.6
cic-types~=0.2.1a8
cic-eth-aux-erc20-demurrage-token~=0.0.3

View File

@ -1,16 +1,15 @@
chainqueue>=0.0.6a1,<0.1.0 chainqueue~=0.0.6a4
chainsyncer[sql]>=0.0.7a3,<0.1.0 chainsyncer[sql]~=0.0.7
alembic==1.4.2 alembic==1.4.2
confini>=0.3.6rc4,<0.5.0 confini~=0.5.3
redis==3.5.3 redis==3.5.3
hexathon~=0.0.1a8 hexathon~=0.1.0
pycryptodome==3.10.1 pycryptodome==3.10.1
liveness~=0.0.1a7 liveness~=0.0.1a7
eth-address-index>=0.2.4a1,<0.3.0 eth-address-index~=0.2.4
eth-accounts-index>=0.1.2a3,<0.2.0 eth-accounts-index~=0.1.2
cic-eth-registry>=0.6.1a6,<0.7.0 erc20-faucet~=0.3.2
erc20-faucet>=0.3.2a2,<0.4.0 erc20-transfer-authorization~=0.3.6
erc20-transfer-authorization>=0.3.5a2,<0.4.0 sarafu-faucet~=0.0.7
sarafu-faucet>=0.0.7a2,<0.1.0 moolb~=0.2.0
moolb~=0.1.1b2 okota~=0.2.5
okota>=0.2.4a6,<0.3.0

View File

@ -1,7 +1,7 @@
[metadata] [metadata]
name = cic-eth name = cic-eth
#version = attr: cic_eth.version.__version_string__ #version = attr: cic_eth.version.__version_string__
version = 0.12.5a2 version = 0.12.7
description = CIC Network Ethereum interaction description = CIC Network Ethereum interaction
author = Louis Holbrook author = Louis Holbrook
author_email = dev@holbrook.no author_email = dev@holbrook.no

View File

@ -6,4 +6,5 @@ pytest-redis==2.0.0
redis==3.5.3 redis==3.5.3
eth-tester==0.5.0b3 eth-tester==0.5.0b3
py-evm==0.3.0a20 py-evm==0.3.0a20
eth-erc20~=0.1.2a2 eth-erc20~=0.1.5
erc20-transfer-authorization~=0.3.6

View File

@ -40,6 +40,7 @@ def test_filter_gas(
foo_token, foo_token,
token_registry, token_registry,
register_lookups, register_lookups,
register_tokens,
celery_session_worker, celery_session_worker,
cic_registry, cic_registry,
): ):
@ -69,7 +70,7 @@ def test_filter_gas(
tx = Tx(tx_src, block=block) tx = Tx(tx_src, block=block)
tx.apply_receipt(rcpt) tx.apply_receipt(rcpt)
t = fltr.filter(eth_rpc, block, tx, db_session=init_database) t = fltr.filter(eth_rpc, block, tx, db_session=init_database)
assert t == None assert t.get() == None
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc) nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc)
c = TokenUniqueSymbolIndex(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) c = TokenUniqueSymbolIndex(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)

View File

@ -288,7 +288,6 @@ def test_fix_nonce(
init_database.commit() init_database.commit()
logg.debug('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
txs = get_nonce_tx_local(default_chain_spec, 3, agent_roles['ALICE'], session=init_database) txs = get_nonce_tx_local(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
ks = txs.keys() ks = txs.keys()
assert len(ks) == 2 assert len(ks) == 2

View File

@ -191,11 +191,17 @@ def test_tokens(
break break
api_param = str(uuid.uuid4()) api_param = str(uuid.uuid4())
fp = os.path.join(CallbackTask.mmap_path, api_param)
f = open(fp, 'wb+')
f.write(b'\x00')
f.close()
api = Api(str(default_chain_spec), queue=None, callback_param=api_param, callback_task='cic_eth.pytest.mock.callback.test_callback') api = Api(str(default_chain_spec), queue=None, callback_param=api_param, callback_task='cic_eth.pytest.mock.callback.test_callback')
t = api.tokens(['BAR'], proof=[[bar_token_declaration]]) t = api.tokens(['BAR'], proof=[[bar_token_declaration]])
r = t.get() r = t.get()
logg.debug('rr {} {}'.format(r, t.children)) logg.debug('rr {} {}'.format(r, t.children))
while True: while True:
fp = os.path.join(CallbackTask.mmap_path, api_param) fp = os.path.join(CallbackTask.mmap_path, api_param)
try: try:

View File

@ -35,10 +35,26 @@ from hexathon import strip_0x
from cic_eth.eth.gas import cache_gas_data from cic_eth.eth.gas import cache_gas_data
from cic_eth.error import OutOfGasError from cic_eth.error import OutOfGasError
from cic_eth.queue.tx import queue_create from cic_eth.queue.tx import queue_create
from cic_eth.task import BaseTask
logg = logging.getLogger() logg = logging.getLogger()
def test_task_gas_limit(
eth_rpc,
eth_signer,
default_chain_spec,
agent_roles,
celery_session_worker,
):
rpc = RPCConnection.connect(default_chain_spec, 'default')
gas_oracle = BaseTask().create_gas_oracle(rpc)
c = Gas(default_chain_spec, signer=eth_signer, gas_oracle=gas_oracle)
(tx_hash_hex, o) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 10, tx_format=TxFormat.RLP_SIGNED)
tx = unpack(bytes.fromhex(strip_0x(o)), default_chain_spec)
assert (tx['gas'], BaseTask.min_fee_price)
def test_task_check_gas_ok( def test_task_check_gas_ok(
default_chain_spec, default_chain_spec,
eth_rpc, eth_rpc,

View File

@ -143,7 +143,7 @@ def test_incoming_balance(
'converters': [], 'converters': [],
} }
b = balance_incoming([token_data], recipient, default_chain_spec.asdict()) b = balance_incoming([token_data], recipient, default_chain_spec.asdict())
assert b[0]['balance_incoming'] == 0 assert b[0]['balance_incoming'] == 1000
otx.readysend(session=init_database) otx.readysend(session=init_database)
init_database.flush() init_database.flush()
@ -152,8 +152,8 @@ def test_incoming_balance(
otx.sent(session=init_database) otx.sent(session=init_database)
init_database.commit() init_database.commit()
b = balance_incoming([token_data], recipient, default_chain_spec.asdict()) #b = balance_incoming([token_data], recipient, default_chain_spec.asdict())
assert b[0]['balance_incoming'] == 1000 #assert b[0]['balance_incoming'] == 1000
otx.success(block=1024, session=init_database) otx.success(block=1024, session=init_database)
init_database.commit() init_database.commit()

View File

@ -1,7 +1,5 @@
crypto-dev-signer>=0.4.15rc2,<=0.4.15 chainqueue~=0.0.6a4
chainqueue>=0.0.5a3,<0.1.0
cic-eth-registry>=0.6.1a6,<0.7.0
redis==3.5.3 redis==3.5.3
hexathon~=0.0.1a8 hexathon~=0.1.0
pycryptodome==3.10.1 pycryptodome==3.10.1
pyxdg==0.27 pyxdg==0.27

View File

@ -9,7 +9,7 @@ import semver
logg = logging.getLogger() logg = logging.getLogger()
version = (0, 4, 0, 'alpha.11') version = (0, 4, 0, 'alpha.12')
version_object = semver.VersionInfo( version_object = semver.VersionInfo(
major=version[0], major=version[0],

View File

@ -1,4 +1,4 @@
confini>=0.3.6rc4,<0.5.0 confini~=0.5.1
africastalking==1.2.3 africastalking==1.2.3
SQLAlchemy==1.3.20 SQLAlchemy==1.3.20
alembic==1.4.2 alembic==1.4.2

View File

@ -1,7 +1,7 @@
# standard imports # standard imports
import semver import semver
version = (0, 3, 1, 'alpha.6') version = (0, 3, 1, 'alpha.7')
version_object = semver.VersionInfo( version_object = semver.VersionInfo(
major=version[0], major=version[0],

View File

@ -14,19 +14,12 @@ ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net
ARG EXTRA_PIP_ARGS="" ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple ARG PIP_INDEX_URL=https://pypi.org/simple
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a7
COPY *requirements.txt ./ COPY *requirements.txt ./
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \ pip install --index-url $PIP_INDEX_URL \
--pre \ --pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \ --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt -r requirements.txt
COPY . . COPY . .

View File

@ -4,10 +4,11 @@ billiard==3.6.4.0
bcrypt==3.2.0 bcrypt==3.2.0
celery==4.4.7 celery==4.4.7
cffi==1.14.6 cffi==1.14.6
cic-eth~=0.12.5a1 cic-eth~=0.12.6
cic-notify~=0.4.0a11 cic-notify~=0.4.0a12
cic-types~=0.2.1a7 cic-types~=0.2.1a8
confini>=0.3.6rc4,<0.5.0 confini~=0.5.2
cic-eth-aux-erc20-demurrage-token~=0.0.3
phonenumbers==8.12.12 phonenumbers==8.12.12
psycopg2==2.8.6 psycopg2==2.8.6
python-i18n[YAML]==0.3.9 python-i18n[YAML]==0.3.9

View File

@ -41,17 +41,17 @@ add_pending_tx_hash $r
# Deploy transfer authorization contact # Deploy transfer authorization contact
advance_nonce #advance_nonce
debug_rpc #debug_rpc
>&2 echo -e "\033[;96mDeploy transfer authorization contract\033[;39m" #>&2 echo -e "\033[;96mDeploy transfer authorization contract\033[;39m"
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy --nonce $nonce -w $gas_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG` #DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy --nonce $nonce -w $gas_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG`
#
#
>&2 echo -e "\033[;96mAdd transfer authorization record to contract registry\033[;39m" #>&2 echo -e "\033[;96mAdd transfer authorization record to contract registry\033[;39m"
advance_nonce #advance_nonce
debug_rpc #debug_rpc
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS` #r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS`
add_pending_tx_hash $r #add_pending_tx_hash $r
# Deploy token index contract # Deploy token index contract
@ -66,6 +66,13 @@ debug_rpc
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS` r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS`
add_pending_tx_hash $r add_pending_tx_hash $r
# Assign writer for token index
>&2 echo -e "\033[;96mEnable token index writer $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER to write to accounts index contract at $DEV_TOKEN_INDEX_ADDRESS\033[;39m"
advance_nonce
debug_rpc
r=`eth-accounts-index-writer -s -u -i $CHAIN_SPEC -p $RPC_PROVIDER --nonce $nonce --fee-limit 1000000 -e $DEV_TOKEN_INDEX_ADDRESS $DEV_DEBUG_FLAG $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER`
add_pending_tx_hash $r
check_wait 2 check_wait 2
echo -e "\033[;96mWriting env_reset file\033[;39m" echo -e "\033[;96mWriting env_reset file\033[;39m"

View File

@ -61,7 +61,7 @@ function deploy_token_erc20_demurrage_token() {
_deploy_token_defaults "DET" "Demurrage Token" _deploy_token_defaults "DET" "Demurrage Token"
advance_nonce advance_nonce
debug_rpc debug_rpc
TOKEN_ADDRESS=`erc20-demurrage-token-deploy --nonce $nonce $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL $DEV_DEBUG_FLAG -ww -s` TOKEN_ADDRESS=`erc20-demurrage-token-deploy --nonce $nonce $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals $TOKEN_DECIMALS $DEV_DEBUG_FLAG -ww -s`
} }
function deploy_accounts_index() { function deploy_accounts_index() {

View File

@ -26,16 +26,7 @@ ARG pip_trusted_host=pypi.org
RUN pip install --index-url $PIP_INDEX_URL \ RUN pip install --index-url $PIP_INDEX_URL \
--pre \ --pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \ --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt -r requirements.txt
COPY override_requirements.txt .
RUN pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
--force-reinstall \
--no-cache \
-r override_requirements.txt
RUN pip freeze RUN pip freeze

View File

@ -1 +0,0 @@
chainlib-eth==0.0.10a15

View File

@ -1,13 +1,12 @@
cic-eth[tools]==0.12.5a2 cic-eth[tools]==0.12.7
chainlib-eth>=0.0.10a17,<0.1.0 cic-types~=0.2.1a8
eth-erc20>=0.1.2a3,<0.2.0 chainlib-eth~=0.0.15
erc20-demurrage-token>=0.0.5a2,<0.1.0 eth-erc20~=0.1.5
eth-address-index>=0.2.4a1,<0.3.0 erc20-demurrage-token~=0.0.7
cic-eth-registry>=0.6.1a6,<0.7.0 eth-address-index~=0.2.4
erc20-transfer-authorization>=0.3.5a2,<0.4.0 cic-eth-registry~=0.6.5
erc20-faucet>=0.3.2a2,<0.4.0 erc20-faucet==0.3.2
sarafu-faucet>=0.0.7a2,<0.1.0 sarafu-faucet==0.0.7
confini>=0.4.2rc3,<1.0.0 confini~=0.5.3
eth-token-index>=0.2.4a1,<=0.3.0 eth-token-index==0.2.4
okota>=0.2.4a15,<0.3.0 okota==0.2.5a1
cic-types~=0.2.1a2

View File

@ -18,6 +18,7 @@ from eth_address_declarator.declarator import AddressDeclarator
from funga.eth.signer import EIP155Signer from funga.eth.signer import EIP155Signer
from funga.eth.keystore.dict import DictKeystore from funga.eth.keystore.dict import DictKeystore
from hexathon import add_0x, strip_0x from hexathon import add_0x, strip_0x
from okota.token_index.index import to_identifier
# local imports # local imports
@ -109,6 +110,7 @@ if __name__ == '__main__':
identifier = bytes.fromhex(hashed_token_proof) identifier = bytes.fromhex(hashed_token_proof)
token_immutable_proof_writer = MetadataRequestsHandler(cic_type=MetadataPointer.NONE, identifier=identifier) token_immutable_proof_writer = MetadataRequestsHandler(cic_type=MetadataPointer.NONE, identifier=identifier)
write_metadata(token_immutable_proof_writer, token_proof_data) write_metadata(token_immutable_proof_writer, token_proof_data)
logg.debug(f'Writing hashed proof: {hashed_token_proof}')
write_to_declarator(contract_address=args.address_declarator, write_to_declarator(contract_address=args.address_declarator,
contract_wrapper=contract_wrapper, contract_wrapper=contract_wrapper,
proof=hashed_token_proof, proof=hashed_token_proof,
@ -116,12 +118,11 @@ if __name__ == '__main__':
signer_address=args.signer_address, signer_address=args.signer_address,
token_address=args.e) token_address=args.e)
hashed_token_proof = hash_proof(args.token_symbol.encode('utf-8')) hashed_token_proof = to_identifier(args.token_symbol)
identifier = bytes.fromhex(hashed_token_proof) logg.debug(f'Writing hashed proof: {hashed_token_proof}')
token_immutable_proof_writer = MetadataRequestsHandler(cic_type=MetadataPointer.NONE, identifier=identifier)
write_to_declarator(contract_address=args.address_declarator, write_to_declarator(contract_address=args.address_declarator,
contract_wrapper=contract_wrapper, contract_wrapper=contract_wrapper,
proof=identifier, proof=hashed_token_proof,
rpc=rpc, rpc=rpc,
signer_address=args.signer_address, signer_address=args.signer_address,
token_address=args.e) token_address=args.e)

View File

@ -1,48 +1,37 @@
# standard imports # standard imports
import argparse
import json
import logging
import os import os
import sys import sys
import logging
import time
import argparse
import sys
import re
import hashlib
import csv
import json
# external imports # external imports
import confini import confini
from hexathon import ( from chainlib.chain import ChainSpec
strip_0x,
add_0x,
)
from chainsyncer.backend.memory import MemBackend
from chainsyncer.driver.head import HeadSyncer
from chainlib.eth.connection import EthHTTPConnection
from chainlib.eth.block import (
block_latest,
)
from chainlib.hash import keccak256_string_to_hex
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from chainlib.eth.block import (
block_latest,
)
from chainlib.eth.connection import EthHTTPConnection
from chainlib.eth.error import (
RequestMismatchException,
)
from chainlib.eth.gas import OverrideGasOracle from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import TxFactory from chainlib.hash import keccak256_string_to_hex
from chainlib.jsonrpc import JSONRPCRequest from chainsyncer.backend.memory import MemBackend
from chainlib.eth.error import ( from chainsyncer.driver.head import HeadSyncer
EthException,
RequestMismatchException,
)
from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore
from cic_types.models.person import Person
from eth_erc20 import ERC20
from cic_eth.cli.chain import chain_interface from cic_eth.cli.chain import chain_interface
from cic_types.models.person import Person
from eth_accounts_index import AccountsIndex from eth_accounts_index import AccountsIndex
from eth_contract_registry import Registry from eth_contract_registry import Registry
from eth_erc20 import ERC20
from eth_token_index import TokenUniqueSymbolIndex from eth_token_index import TokenUniqueSymbolIndex
from funga.eth.keystore.dict import DictKeystore
from funga.eth.signer import EIP155Signer
from hexathon import (
strip_0x,
)
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()

View File

@ -2,8 +2,8 @@
import logging import logging
# external imports # external imports
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer from funga.eth.signer import EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore from funga.eth.keystore.dict import DictKeystore
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)

View File

@ -9,8 +9,8 @@ from chainlib.chain import ChainSpec
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from chainlib.eth.connection import EthHTTPConnection from chainlib.eth.connection import EthHTTPConnection
from confini import Config from confini import Config
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer from funga.eth.signer import EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore from funga.eth.keystore.dict import DictKeystore
# local imports # local imports
from import_util import BalanceProcessor, get_celery_worker_status from import_util import BalanceProcessor, get_celery_worker_status

View File

@ -35,6 +35,6 @@ RUN pip install --index-url $PIP_INDEX_URL \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \ --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt -r requirements.txt
COPY . . COPY . .
ENTRYPOINT [ ] ENTRYPOINT [ ]

View File

@ -33,8 +33,8 @@ from chainlib.eth.error import (
RequestMismatchException, RequestMismatchException,
) )
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer from funga.eth.signer import EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore from funga.eth.keystore.dict import DictKeystore
from cic_types.models.person import Person from cic_types.models.person import Person
from eth_erc20 import ERC20 from eth_erc20 import ERC20
from cic_eth.cli.chain import chain_interface from cic_eth.cli.chain import chain_interface

View File

@ -27,9 +27,9 @@ from cic_types.processor import generate_metadata_pointer
from cic_types import MetadataPointer from cic_types import MetadataPointer
from eth_accounts_index.registry import AccountRegistry from eth_accounts_index.registry import AccountRegistry
from eth_contract_registry import Registry from eth_contract_registry import Registry
from crypto_dev_signer.keystore.dict import DictKeystore from funga.eth.keystore.dict import DictKeystore
from crypto_dev_signer.eth.signer.defaultsigner import ReferenceSigner as EIP155Signer from funga.eth.signer.defaultsigner import EIP155Signer
from crypto_dev_signer.keystore.keyfile import to_dict as to_keyfile_dict from funga.eth.keystore.keyfile import to_dict as to_keyfile_dict
# local imports # local imports
from common.dirs import initialize_dirs from common.dirs import initialize_dirs

View File

@ -1,15 +1,15 @@
sarafu-faucet~=0.0.7a2 sarafu-faucet~=0.0.7a2
cic-eth[tools]~=0.12.4a13 cic-eth[tools]~=0.12.5a11
cic-types~=0.2.1a2 cic-types~=0.2.1a8
funga>=0.5.1a1,<=0.5.15 funga>=0.5.1
faker==4.17.1 faker==4.17.1
chainsyncer~=0.0.7a3 chainsyncer~=0.0.7a3
chainlib-eth~=0.0.10a18 chainlib-eth~=0.0.15
eth-address-index~=0.2.4a1 eth-address-index~=0.2.4a1
eth-contract-registry~=0.6.3a3 eth-contract-registry~=0.6.3a3
eth-accounts-index~=0.1.2a3 eth-accounts-index~=0.1.2a3
eth-erc20~=0.1.2a3 eth-erc20==0.1.4
erc20-faucet~=0.3.2a2 erc20-faucet~=0.3.2a2
psycopg2==2.8.6 psycopg2==2.8.6
liveness~=0.0.1a7 liveness~=0.0.1a7
confini>=0.4.2rc3,<0.5.0 confini>=0.5.2

View File

@ -68,7 +68,11 @@ admin_tests = [
'local_key', 'local_key',
] ]
all_tests = eth_tests + custodial_tests + metadata_tests + phone_tests cache_tests = [
'cache_tx_user',
]
all_tests = eth_tests + custodial_tests + metadata_tests + phone_tests + cache_tests
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks') argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address') argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
@ -77,11 +81,14 @@ argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', defa
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec') argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
argparser.add_argument('--meta-provider', type=str, dest='meta_provider', default='http://localhost:63380', help='cic-meta url') argparser.add_argument('--meta-provider', type=str, dest='meta_provider', default='http://localhost:63380', help='cic-meta url')
argparser.add_argument('--ussd-provider', type=str, dest='ussd_provider', default='http://localhost:63315', help='cic-ussd url') argparser.add_argument('--ussd-provider', type=str, dest='ussd_provider', default='http://localhost:63315', help='cic-ussd url')
argparser.add_argument('--cache-provider', type=str, dest='cache_provider', default='http://localhost:63313', help='cic-cache url')
argparser.add_argument('--skip-custodial', dest='skip_custodial', action='store_true', help='skip all custodial verifications') argparser.add_argument('--skip-custodial', dest='skip_custodial', action='store_true', help='skip all custodial verifications')
argparser.add_argument('--skip-ussd', dest='skip_ussd', action='store_true', help='skip all ussd verifications') argparser.add_argument('--skip-ussd', dest='skip_ussd', action='store_true', help='skip all ussd verifications')
argparser.add_argument('--skip-metadata', dest='skip_metadata', action='store_true', help='skip all metadata verifications') argparser.add_argument('--skip-metadata', dest='skip_metadata', action='store_true', help='skip all metadata verifications')
argparser.add_argument('--skip-cache', dest='skip_cache', action='store_true', help='skip all cache verifications')
argparser.add_argument('--exclude', action='append', type=str, default=[], help='skip specified verification') argparser.add_argument('--exclude', action='append', type=str, default=[], help='skip specified verification')
argparser.add_argument('--include', action='append', type=str, help='include specified verification') argparser.add_argument('--include', action='append', type=str, help='include specified verification')
argparser.add_argument('--list-verifications', action='store_true', help='print a list of verification check identifiers')
argparser.add_argument('--token-symbol', default='GFT', type=str, dest='token_symbol', help='Token symbol to use for trnsactions') argparser.add_argument('--token-symbol', default='GFT', type=str, dest='token_symbol', help='Token symbol to use for trnsactions')
argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address') argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
@ -115,6 +122,7 @@ config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL') config.censor('PASSWORD', 'SSL')
config.add(args.meta_provider, '_META_PROVIDER', True) config.add(args.meta_provider, '_META_PROVIDER', True)
config.add(args.ussd_provider, '_USSD_PROVIDER', True) config.add(args.ussd_provider, '_USSD_PROVIDER', True)
config.add(args.cache_provider, '_CACHE_PROVIDER', True)
token_symbol = args.token_symbol token_symbol = args.token_symbol
@ -351,6 +359,24 @@ class Verifier:
raise VerifierError(o_retrieved, 'metadata (person)') raise VerifierError(o_retrieved, 'metadata (person)')
def verify_cache_tx_user(self, address, balance=None):
    """Verify that the cic-cache service has recorded at least one
    transaction for the given address.

    Builds the cache URL from the configured _CACHE_PROVIDER as
    <provider>/txa/user/<address>, requests it with the
    X_CIC_CACHE_MODE=all header, and fails if the request raises an
    HTTP error or the returned 'data' list is empty. Individual
    transactions are only logged, not validated.

    :param address: account address to look up in the cache
    :param balance: unused here; presumably kept for signature parity
        with the other verify_* checks — TODO confirm against verify()
    :raises VerifierError: on HTTP error or empty transaction list
    """
    url = os.path.join(config.get('_CACHE_PROVIDER'), 'txa', 'user', address)
    req = urllib.request.Request(url)
    # NOTE(review): header name uses underscores; some HTTP stacks normalize
    # or drop underscore headers — confirm the cache server reads this as-is.
    req.add_header('X_CIC_CACHE_MODE', 'all')
    try:
        res = urllib.request.urlopen(req)
    except urllib.error.HTTPError as e:
        raise VerifierError(
                '({}) {}'.format(url, e),
                'cache (tx user)',
                )
    r = json.load(res)
    if len(r['data']) == 0:
        raise VerifierError('empty tx list for address {}'.format(address), 'cache (tx user)')
    # Transactions are enumerated for visibility only; contents are not checked.
    for tx in r['data']:
        logg.warning('found tx {} for {} but not checking validity'.format(tx['tx_hash'], address))
def verify_metadata_phone(self, address, balance=None): def verify_metadata_phone(self, address, balance=None):
upper_address = strip_0x(address).upper() upper_address = strip_0x(address).upper()
f = open(os.path.join( f = open(os.path.join(
@ -397,11 +423,13 @@ class Verifier:
if m != 'CON Welcome': if m != 'CON Welcome':
raise VerifierError(response_data, 'ussd') raise VerifierError(response_data, 'ussd')
def verify_ussd_pins(self, address, balance): def verify_ussd_pins(self, address, balance):
response_data = send_ussd_request(address, self.data_dir) response_data = send_ussd_request(address, self.data_dir)
if response_data[:11] != 'CON Balance' and response_data[:9] != 'CON Salio': if response_data[:11] != 'CON Balance' and response_data[:9] != 'CON Salio':
raise VerifierError(response_data, 'pins') raise VerifierError(response_data, 'pins')
def verify(self, address, balance, debug_stem=None): def verify(self, address, balance, debug_stem=None):
for k in active_tests: for k in active_tests:

View File

@ -0,0 +1,18 @@
#!/bin/bash

# Demurrage poker: for every configured token, read the last demurrage
# checkpoint and, if the read succeeded, submit an applyDemurrage transaction.
#
# Environment:
#   CIC_REGISTRY_ADDRESS  registry to resolve DefaultToken from (required)
#   TOKEN_ADDRESSES       comma-separated token addresses; defaults to the
#                         registry's DefaultToken
#   CHAIN_SPEC            chain spec passed to eth-encode (required)
#   WALLET_KEY_FILE       keyfile used to sign the applyDemurrage tx (required)

default_token_address=$(eth-contract-registry-list --raw -e "$CIC_REGISTRY_ADDRESS" DefaultToken)
export TOKEN_ADDRESSES=${TOKEN_ADDRESSES:-$default_token_address}
IFS="," read -r -a token_addresses <<< "$TOKEN_ADDRESSES"

export RPC_VERIFY=1

for token_address in "${token_addresses[@]}"; do
	>&2 echo "checking token address $token_address"
	t=$(eth-encode --signature demurrageTimestamp -e "$token_address" --notx)
	v=$(eth-encode --signature demurrageAmount -e "$token_address" --notx)
	# Capture the read's exit status immediately. The original tested $? after
	# an intervening echo, so it reflected the echo (always 0), not eth-encode.
	rc=$?
	>&2 echo "last demurrage apply call for $token_address was value $v at $t"
	if [ "$rc" -eq 0 ]; then
		h=$(eth-encode --signature applyDemurrage -i "$CHAIN_SPEC" -y "$WALLET_KEY_FILE" -e "$token_address" --fee-limit 8000000 -s -ww)
		>&2 echo "applied demurrage on $token_address tx hash $h"
	fi
done

View File

@ -153,7 +153,7 @@ services:
SIGNER_PROVIDER: ${SIGNER_PROVIDER:-http://cic-signer:8000} SIGNER_PROVIDER: ${SIGNER_PROVIDER:-http://cic-signer:8000}
SIGNER_SECRET: ${SIGNER_SECRET:-deadbeef} SIGNER_SECRET: ${SIGNER_SECRET:-deadbeef}
TASKS_TRACE_QUEUE_STATUS: ${TASKS_TRACE_QUEUE_STATUS:-1} TASKS_TRACE_QUEUE_STATUS: ${TASKS_TRACE_QUEUE_STATUS:-1}
ETH_MIN_FEE_PRICE: $ETH_MIN_FEE_PRICE ETH_MIN_FEE_PRICE: ${ETH_MIN_FEE_PRICE:-1000000000}
restart: unless-stopped restart: unless-stopped
depends_on: depends_on:
- evm - evm
@ -164,13 +164,13 @@ services:
- signer-data:/run/crypto-dev-signer - signer-data:/run/crypto-dev-signer
- contract-config:/tmp/cic/config/:ro - contract-config:/tmp/cic/config/:ro
command: command:
- /bin/bash - /bin/bash
- -c - -c
- | - |
set -a set -a
if [[ -f /tmp/cic/config/env_reset ]]; then source /tmp/cic/config/env_reset; fi if [[ -f /tmp/cic/config/env_reset ]]; then source /tmp/cic/config/env_reset; fi
set +a set +a
./start_tasker.sh --aux-all -q cic-eth -vv ./start_tasker.sh --aux-all -q cic-eth -vv
cic-eth-tracker: cic-eth-tracker:
@ -203,7 +203,7 @@ services:
SYNCER_LOOP_INTERVAL: ${SYNCER_LOOP_INTERVAL:-5} SYNCER_LOOP_INTERVAL: ${SYNCER_LOOP_INTERVAL:-5}
SYNCER_NO_HISTORY: ${SYNCER_NO_HISTORY:-1} SYNCER_NO_HISTORY: ${SYNCER_NO_HISTORY:-1}
SYNCER_OFFSET: ${SYNCER_OFFSET:-0} SYNCER_OFFSET: ${SYNCER_OFFSET:-0}
TASKS_TRANSFER_CALLBACKS: ${TASKS_TRANSFER_CALLBACKS:-"cic-eth:cic_eth.callbacks.noop.noop,cic-ussd:cic_ussd.tasks.callback_handler.transaction_callback"} TASKS_TRANSFER_CALLBACKS: ${TASKS_TRANSFER_CALLBACKS:-cic-eth:cic_eth.callbacks.noop.noop,cic-ussd:cic_ussd.tasks.callback_handler.transaction_callback}
restart: unless-stopped restart: unless-stopped
depends_on: depends_on:
- evm - evm
@ -342,7 +342,7 @@ services:
SYNCER_LOOP_INTERVAL: ${SYNCER_LOOP_INTERVAL:-5} SYNCER_LOOP_INTERVAL: ${SYNCER_LOOP_INTERVAL:-5}
SYNCER_OFFSET: ${SYNCER_OFFSET:-0} SYNCER_OFFSET: ${SYNCER_OFFSET:-0}
SYNCER_NO_HISTORY: ${SYNCER_NO_HISTORY:-1} SYNCER_NO_HISTORY: ${SYNCER_NO_HISTORY:-1}
TASKS_TRANSFER_CALLBACKS: ${TASKS_TRANSFER_CALLBACKS:-"cic-eth:cic_eth.callbacks.noop.noop,cic-ussd:cic_ussd.tasks.callback_handler.transaction_callback"} TASKS_TRANSFER_CALLBACKS: ${TASKS_TRANSFER_CALLBACKS:-cic-eth:cic_eth.callbacks.noop.noop,cic-ussd:cic_ussd.tasks.callback_handler.transaction_callback}
restart: unless-stopped restart: unless-stopped
depends_on: depends_on:
- evm - evm
@ -497,7 +497,7 @@ services:
dockerfile: docker/Dockerfile dockerfile: docker/Dockerfile
args: args:
PIP_INDEX_URL: ${PIP_INDEX_URL:-https://pypi.org/simple} PIP_INDEX_URL: ${PIP_INDEX_URL:-https://pypi.org/simple}
EXTRA_PIP_INDEX_URL: ${EXTRA_PIP_INDEX_URL:-https://pip.grassrootseconomics.net:8433} EXTRA_PIP_INDEX_URL: ${EXTRA_PIP_INDEX_URL:-https://pip.grassrootseconomics.net}
EXTRA_PIP_ARGS: $EXTRA_PIP_ARGS EXTRA_PIP_ARGS: $EXTRA_PIP_ARGS
DOCKER_REGISTRY: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics} DOCKER_REGISTRY: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}
environment: environment: