Merge branch 'lash/improve-cache' into 'master'

refactor: Improve cic-cache

See merge request grassrootseconomics/cic-internal-integration!303

commit 03c7c1ddbc
@@ -1 +1 @@
include *requirements.txt cic_cache/data/config/*
include *requirements.txt cic_cache/data/config/* cic_cache/db/migrations/default/* cic_cache/db/migrations/default/versions/*
@@ -1,4 +1,4 @@
[cic]
registry_address =
trust_address =
health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas
health_modules =
@@ -3,7 +3,8 @@ engine =
driver =
host =
port =
name = cic-cache
#name = cic-cache
prefix =
user =
password =
debug = 0
@@ -9,21 +9,26 @@ from .list import (
        tag_transaction,
        add_tag,
        )
from cic_cache.db.models.base import SessionBase


logg = logging.getLogger()


def dsn_from_config(config):
def dsn_from_config(config, name):
    scheme = config.get('DATABASE_ENGINE')
    if config.get('DATABASE_DRIVER') != None:
        scheme += '+{}'.format(config.get('DATABASE_DRIVER'))

    database_name = name
    if config.get('DATABASE_PREFIX'):
        database_name = '{}_{}'.format(config.get('DATABASE_PREFIX'), database_name)
    dsn = ''
    if config.get('DATABASE_ENGINE') == 'sqlite':
        SessionBase.poolable = False
        dsn = '{}:///{}'.format(
                scheme,
                config.get('DATABASE_NAME'),
                database_name,
                )

    else:
@@ -33,7 +38,7 @@ def dsn_from_config(config):
            config.get('DATABASE_PASSWORD'),
            config.get('DATABASE_HOST'),
            config.get('DATABASE_PORT'),
            config.get('DATABASE_NAME'),
            database_name,
            )
    logg.debug('parsed dsn from config: {}'.format(dsn))
    return dsn
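The new `name` argument and the `DATABASE_PREFIX` handling above decide which database the DSN ends up pointing at. A minimal sketch of how those pieces combine, using a plain dict in place of the confini config object; the non-sqlite DSN layout is abbreviated in the hunk, so the postgres-style format below is an assumption:

    # Hypothetical stand-in for the confini config used by dsn_from_config().
    config = {
        'DATABASE_ENGINE': 'postgresql',
        'DATABASE_DRIVER': 'psycopg2',
        'DATABASE_USER': 'postgres',
        'DATABASE_PASSWORD': '',
        'DATABASE_HOST': 'localhost',
        'DATABASE_PORT': 5432,
        'DATABASE_PREFIX': 'test',
    }

    def sketch_dsn(config, name):
        # scheme becomes e.g. postgresql+psycopg2
        scheme = config['DATABASE_ENGINE']
        if config.get('DATABASE_DRIVER'):
            scheme += '+{}'.format(config['DATABASE_DRIVER'])
        # DATABASE_PREFIX turns cic_cache into test_cic_cache
        database_name = name
        if config.get('DATABASE_PREFIX'):
            database_name = '{}_{}'.format(config['DATABASE_PREFIX'], database_name)
        return '{}://{}:{}@{}:{}/{}'.format(
            scheme,
            config['DATABASE_USER'],
            config['DATABASE_PASSWORD'],
            config['DATABASE_HOST'],
            config['DATABASE_PORT'],
            database_name,
        )

    print(sketch_dsn(config, 'cic_cache'))
    # postgresql+psycopg2://postgres:@localhost:5432/test_cic_cache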
@ -5,7 +5,11 @@ import re
|
||||
import base64
|
||||
|
||||
# external imports
|
||||
from hexathon import add_0x
|
||||
from hexathon import (
|
||||
add_0x,
|
||||
strip_0x,
|
||||
)
|
||||
from chainlib.encode import TxHexNormalizer
|
||||
|
||||
# local imports
|
||||
from cic_cache.cache import (
|
||||
@ -16,27 +20,72 @@ from cic_cache.cache import (
|
||||
logg = logging.getLogger(__name__)
|
||||
#logg = logging.getLogger()
|
||||
|
||||
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
|
||||
re_transactions_all_bloom = r'/tx/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?'
|
||||
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
|
||||
re_transactions_all_data = r'/txa/(\d+)?/?(\d+)/?'
|
||||
re_transactions_all_data = r'/txa/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?'
|
||||
re_transactions_account_data = r'/txa/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
|
||||
re_default_limit = r'/defaultlimit/?'
|
||||
|
||||
DEFAULT_LIMIT = 100
|
||||
|
||||
tx_normalize = TxHexNormalizer()
|
||||
|
||||
def parse_query_account(r):
|
||||
address = strip_0x(r[1])
|
||||
#address = tx_normalize.wallet_address(address)
|
||||
limit = DEFAULT_LIMIT
|
||||
g = r.groups()
|
||||
if len(g) > 3:
|
||||
limit = int(r[4])
|
||||
if limit == 0:
|
||||
limit = DEFAULT_LIMIT
|
||||
offset = 0
|
||||
if len(g) > 4:
|
||||
offset = int(r[6])
|
||||
|
||||
logg.debug('account query is address {} offset {} limit {}'.format(address, offset, limit))
|
||||
|
||||
return (address, offset, limit,)
|
||||
|
||||
|
||||
# r is an re.Match
|
||||
def parse_query_any(r):
|
||||
limit = DEFAULT_LIMIT
|
||||
offset = 0
|
||||
block_offset = None
|
||||
block_end = None
|
||||
if r.lastindex != None:
|
||||
if r.lastindex > 0:
|
||||
limit = int(r[1])
|
||||
if r.lastindex > 1:
|
||||
offset = int(r[2])
|
||||
if r.lastindex > 2:
|
||||
block_offset = int(r[3])
|
||||
if r.lastindex > 3:
|
||||
block_end = int(r[4])
|
||||
if block_end < block_offset:
|
||||
raise ValueError('cart before the horse, dude')
|
||||
|
||||
logg.debug('data query is offset {} limit {} block_offset {} block_end {}'.format(offset, limit, block_offset, block_end))
|
||||
|
||||
return (offset, limit, block_offset, block_end,)
|
||||
|
||||
|
||||
def process_default_limit(session, env):
|
||||
r = re.match(re_default_limit, env.get('PATH_INFO'))
|
||||
if not r:
|
||||
return None
|
||||
|
||||
return ('application/json', str(DEFAULT_LIMIT).encode('utf-8'),)
|
||||
|
||||
|
||||
def process_transactions_account_bloom(session, env):
|
||||
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
|
||||
if not r:
|
||||
return None
|
||||
logg.debug('match account bloom')
|
||||
|
||||
address = r[1]
|
||||
if r[2] == None:
|
||||
address = add_0x(address)
|
||||
offset = 0
|
||||
if r.lastindex > 2:
|
||||
offset = r[4]
|
||||
limit = DEFAULT_LIMIT
|
||||
if r.lastindex > 4:
|
||||
limit = r[6]
|
||||
(address, offset, limit,) = parse_query_account(r)
|
||||
|
||||
c = BloomCache(session)
|
||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
|
||||
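The widened route expressions above make every numeric path segment optional, and `parse_query_any` reads `re.Match.lastindex` to see how many segments were actually supplied. A small standalone check of that behaviour, using the `/tx` pattern exactly as defined in this hunk:

    import re

    # Pattern copied from re_transactions_all_bloom above.
    re_tx = r'/tx/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?'

    for path in ('/tx', '/tx/50', '/tx/50/10', '/tx/50/10/410000/420000'):
        m = re.match(re_tx, path)
        # lastindex is None when no group matched, otherwise the index of the
        # last populated group: limit, offset, block_offset, block_end.
        print(path, m.lastindex, m.groups())

    # /tx None (None, None, None, None)
    # /tx/50 1 ('50', None, None, None)
    # /tx/50/10 2 ('50', '10', None, None)
    # /tx/50/10/410000/420000 4 ('50', '10', '410000', '420000')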
@ -59,13 +108,9 @@ def process_transactions_all_bloom(session, env):
|
||||
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
|
||||
if not r:
|
||||
return None
|
||||
logg.debug('match all bloom')
|
||||
|
||||
offset = DEFAULT_LIMIT
|
||||
if r.lastindex > 0:
|
||||
offset = r[1]
|
||||
limit = 0
|
||||
if r.lastindex > 1:
|
||||
limit = r[2]
|
||||
(limit, offset, block_offset, block_end,) = parse_query_any(r)
|
||||
|
||||
c = BloomCache(session)
|
||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
|
||||
@ -88,17 +133,16 @@ def process_transactions_all_data(session, env):
|
||||
r = re.match(re_transactions_all_data, env.get('PATH_INFO'))
|
||||
if not r:
|
||||
return None
|
||||
if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
|
||||
return None
|
||||
#if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
|
||||
# return None
|
||||
logg.debug('match all data')
|
||||
|
||||
logg.debug('got data request {}'.format(env))
|
||||
block_offset = r[1]
|
||||
block_end = r[2]
|
||||
if int(r[2]) < int(r[1]):
|
||||
raise ValueError('cart before the horse, dude')
|
||||
|
||||
(offset, limit, block_offset, block_end) = parse_query_any(r)
|
||||
|
||||
c = DataCache(session)
|
||||
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(0, 0, block_offset, block_end, oldest=True) # oldest needs to be settable
|
||||
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, limit, block_offset, block_end, oldest=True) # oldest needs to be settable
|
||||
|
||||
for r in tx_cache:
|
||||
r['date_block'] = r['date_block'].timestamp()
|
||||
@ -113,3 +157,30 @@ def process_transactions_all_data(session, env):
|
||||
j = json.dumps(o)
|
||||
|
||||
return ('application/json', j.encode('utf-8'),)
|
||||
|
||||
|
||||
def process_transactions_account_data(session, env):
|
||||
r = re.match(re_transactions_account_data, env.get('PATH_INFO'))
|
||||
if not r:
|
||||
return None
|
||||
logg.debug('match account data')
|
||||
#if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
|
||||
# return None
|
||||
|
||||
(address, offset, limit,) = parse_query_account(r)
|
||||
|
||||
c = DataCache(session)
|
||||
(lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data(address, offset, limit)
|
||||
|
||||
for r in tx_cache:
|
||||
r['date_block'] = r['date_block'].timestamp()
|
||||
|
||||
o = {
|
||||
'low': lowest_block,
|
||||
'high': highest_block,
|
||||
'data': tx_cache,
|
||||
}
|
||||
|
||||
j = json.dumps(o)
|
||||
|
||||
return ('application/json', j.encode('utf-8'),)
|
||||
|
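Each `process_*` handler above either returns `None` (the route did not match) or a `(content_type, payload)` tuple whose payload is the JSON envelope with `low`, `high` and `data`, after flattening `date_block` to a Unix timestamp. A minimal sketch of that envelope construction, with made-up field values:

    import json
    import datetime

    # Fabricated row standing in for what DataCache.load_transactions_with_data returns.
    tx_cache = [
        {'tx_hash': '0x' + 'de' * 32, 'date_block': datetime.datetime(2021, 11, 1)},
    ]

    for r in tx_cache:
        # same flattening the handlers do before serializing
        r['date_block'] = r['date_block'].timestamp()

    o = {
        'low': 419999,
        'high': 420000,
        'data': tx_cache,
    }
    j = json.dumps(o)
    response = ('application/json', j.encode('utf-8'),)
    print(response[0], len(response[1]), 'bytes')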
@ -12,21 +12,20 @@ import cic_cache.cli
|
||||
from cic_cache.db import dsn_from_config
|
||||
from cic_cache.db.models.base import SessionBase
|
||||
from cic_cache.runnable.daemons.query import (
|
||||
process_default_limit,
|
||||
process_transactions_account_bloom,
|
||||
process_transactions_account_data,
|
||||
process_transactions_all_bloom,
|
||||
process_transactions_all_data,
|
||||
)
|
||||
import cic_cache.cli
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
|
||||
migrationsdir = os.path.join(dbdir, 'migrations')
|
||||
|
||||
# process args
|
||||
arg_flags = cic_cache.cli.argflag_std_base
|
||||
local_arg_flags = cic_cache.cli.argflag_local_task
|
||||
arg_flags = cic_cache.cli.argflag_std_read
|
||||
local_arg_flags = cic_cache.cli.argflag_local_sync | cic_cache.cli.argflag_local_task
|
||||
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
args = argparser.parse_args()
|
||||
@ -35,7 +34,7 @@ args = argparser.parse_args()
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
|
||||
|
||||
# connect to database
|
||||
dsn = dsn_from_config(config)
|
||||
dsn = dsn_from_config(config, 'cic_cache')
|
||||
SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
|
||||
|
||||
|
||||
@ -47,9 +46,11 @@ def application(env, start_response):
|
||||
|
||||
session = SessionBase.create_session()
|
||||
for handler in [
|
||||
process_transactions_account_data,
|
||||
process_transactions_account_bloom,
|
||||
process_transactions_all_data,
|
||||
process_transactions_all_bloom,
|
||||
process_transactions_account_bloom,
|
||||
process_default_limit,
|
||||
]:
|
||||
r = None
|
||||
try:
|
||||
|
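The daemon above walks its handler list until one of them returns a non-`None` tuple, and only then builds the HTTP response. A toy WSGI application showing the same first-match dispatch shape; the handler and session here are stand-ins, not the real cic-cache ones:

    import json

    def process_default_limit(session, env):
        if env.get('PATH_INFO') != '/defaultlimit':
            return None
        return ('application/json', json.dumps(100).encode('utf-8'),)

    def application(env, start_response):
        session = None  # the real daemon opens a SessionBase session here
        for handler in [
                process_default_limit,
                ]:
            r = handler(session, env)
            if r is None:
                continue
            start_response('200 OK', [('Content-Type', r[0])])
            return [r[1]]
        start_response('404 Not Found', [])
        return [b'']

    # Quick local check without uwsgi, via the stdlib reference server:
    # from wsgiref.simple_server import make_server
    # make_server('localhost', 5555, application).serve_forever()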
@ -3,6 +3,7 @@ import logging
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import tempfile
|
||||
|
||||
# third-party imports
|
||||
import celery
|
||||
@ -28,7 +29,7 @@ args = argparser.parse_args()
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
|
||||
|
||||
# connect to database
|
||||
dsn = dsn_from_config(config)
|
||||
dsn = dsn_from_config(config, 'cic_cache')
|
||||
SessionBase.connect(dsn)
|
||||
|
||||
# set up celery
|
||||
|
@ -50,7 +50,7 @@ args = argparser.parse_args()
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
|
||||
|
||||
# connect to database
|
||||
dsn = dsn_from_config(config)
|
||||
dsn = dsn_from_config(config, 'cic_cache')
|
||||
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
|
||||
|
||||
# set up rpc
|
||||
|
@ -5,7 +5,7 @@ version = (
|
||||
0,
|
||||
2,
|
||||
1,
|
||||
'alpha.2',
|
||||
'alpha.3',
|
||||
)
|
||||
|
||||
version_object = semver.VersionInfo(
|
||||
|
@ -1,3 +0,0 @@
|
||||
[celery]
|
||||
broker_url = redis:///
|
||||
result_url = redis:///
|
@ -1,3 +0,0 @@
|
||||
[cic]
|
||||
registry_address =
|
||||
trust_address =
|
apps/cic-cache/config/config.ini (new empty file)
@ -1,9 +0,0 @@
|
||||
[database]
|
||||
NAME=cic_cache
|
||||
USER=postgres
|
||||
PASSWORD=
|
||||
HOST=localhost
|
||||
PORT=5432
|
||||
ENGINE=postgresql
|
||||
DRIVER=psycopg2
|
||||
DEBUG=0
|
@ -1,3 +0,0 @@
|
||||
[celery]
|
||||
broker_url = redis://localhost:63379
|
||||
result_url = redis://localhost:63379
|
@ -1,3 +0,0 @@
|
||||
[cic]
|
||||
registry_address =
|
||||
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
|
@ -1,9 +0,0 @@
|
||||
[database]
|
||||
NAME=cic_cache
|
||||
USER=grassroots
|
||||
PASSWORD=
|
||||
HOST=localhost
|
||||
PORT=63432
|
||||
ENGINE=postgresql
|
||||
DRIVER=psycopg2
|
||||
DEBUG=0
|
@ -1,4 +0,0 @@
|
||||
[syncer]
|
||||
loop_interval = 1
|
||||
offset = 0
|
||||
no_history = 0
|
@ -1,2 +0,0 @@
|
||||
[bancor]
|
||||
dir =
|
@ -1,4 +1,3 @@
|
||||
[cic]
|
||||
registry_address =
|
||||
chain_spec =
|
||||
trust_address =
|
||||
|
@ -1,5 +1,5 @@
|
||||
[database]
|
||||
NAME=cic-cache-test
|
||||
PREFIX=cic-cache-test
|
||||
USER=postgres
|
||||
PASSWORD=
|
||||
HOST=localhost
|
||||
|
@ -1,5 +0,0 @@
|
||||
[eth]
|
||||
#ws_provider = ws://localhost:8546
|
||||
#ttp_provider = http://localhost:8545
|
||||
provider = http://localhost:8545
|
||||
#chain_id =
|
@ -1,4 +1,4 @@
|
||||
openapi: "3.0.3"
|
||||
openapi: "3.0.2"
|
||||
info:
|
||||
title: Grassroots Economics CIC Cache
|
||||
description: Cache of processed transaction data from Ethereum blockchain and worker queues
|
||||
@ -9,17 +9,34 @@ info:
|
||||
email: will@grassecon.org
|
||||
license:
|
||||
name: GPLv3
|
||||
version: 0.1.0
|
||||
version: 0.2.0
|
||||
|
||||
paths:
|
||||
/tx/{offset}/{limit}:
|
||||
description: Bloom filter for batch of latest transactions
|
||||
/defaultlimit:
|
||||
summary: The default limit value of result sets.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve default limit
|
||||
operationId: limit.default
|
||||
responses:
|
||||
200:
|
||||
description: Limit query successful
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/Limit"
|
||||
|
||||
/tx:
|
||||
summary: Bloom filter for batch of latest transactions
|
||||
description: Generate a bloom filter of the latest transactions in the cache. The maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: tx.get
|
||||
operationId: tx.get.latest
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
@ -29,27 +46,109 @@ paths:
|
||||
$ref: "#/components/schemas/BlocksBloom"
|
||||
|
||||
|
||||
parameters:
|
||||
- name: offset
|
||||
in: path
|
||||
/tx/{limit}:
|
||||
summary: Bloom filter for batch of latest transactions
|
||||
description: Generate a bloom filter of the latest transactions in the cache. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: tx.get.latest.limit
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful. Results are ordered from newest to oldest.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
$ref: "#/components/schemas/BlocksBloom"
|
||||
parameters:
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
/tx/{address}/{offset}/{limit}:
|
||||
description: Bloom filter for batch of latest transactions by account
|
||||
/tx/{limit}/{offset}:
|
||||
summary: Bloom filter for batch of latest transactions
|
||||
description: Generate a bloom filter of the latest transactions in the cache. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: tx.get
|
||||
operationId: tx.get.latest.range
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful. Results are ordered from newest to oldest.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/BlocksBloom"
|
||||
parameters:
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
/tx/{limit}/{offset}/{block_offset}:
|
||||
summary: Bloom filter for batch of transactions since a particular block.
|
||||
description: Generate a bloom filter of the latest transactions since a particular block in the cache. The block parameter is inclusive. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: tx.get.latest.range.block.offset
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful. Results are ordered from oldest to newest.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/BlocksBloom"
|
||||
|
||||
parameters:
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: block_offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
/tx/{limit}/{offset}/{block_offset}/{block_end}:
|
||||
summary: Bloom filter for batch of transactions within a particular block range.
|
||||
description: Generate a bloom filter of the latest transactions within a particular block range in the cache. The block parameters are inclusive. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: tx.get.latest.range.block.range
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
@ -58,6 +157,49 @@ paths:
|
||||
schema:
|
||||
$ref: "#/components/schemas/BlocksBloom"
|
||||
|
||||
parameters:
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: block_offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: block_end
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
/tx/{address}:
|
||||
summary: Bloom filter for batch of latest transactions by account.
|
||||
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: tx.get.user
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/BlocksBloom"
|
||||
|
||||
parameters:
|
||||
- name: address
|
||||
@ -65,26 +207,342 @@ paths:
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: offset
|
||||
in: path
|
||||
|
||||
|
||||
/tx/{address}/{limit}:
|
||||
summary: Bloom filter for batch of latest transactions by account.
|
||||
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: tx.get.user.limit
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
$ref: "#/components/schemas/BlocksBloom"
|
||||
|
||||
parameters:
|
||||
- name: address
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
/tx/{address}/{limit}/{offset}:
|
||||
summary: Bloom filter for batch of latest transactions by account
|
||||
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: tx.get.user.range
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/BlocksBloom"
|
||||
|
||||
parameters:
|
||||
- name: address
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
/txa:
|
||||
summary: Cached data for latest transactions.
|
||||
description: Return data entries of the latest transactions in the cache. The maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: txa.get.latest
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/TransactionList"
|
||||
|
||||
|
||||
/txa/{limit}:
|
||||
summary: Cached data for latest transactions.
|
||||
description: Return data entries of the latest transactions in the cache. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: txa.get.latest.limit
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/TransactionList"
|
||||
|
||||
parameters:
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
/txa/{limit}/{offset}:
|
||||
summary: Cached data for latest transactions.
|
||||
description: Return data entries of the latest transactions in the cache. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: txa.get.latest.range
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/TransactionList"
|
||||
|
||||
parameters:
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
/txa/{limit}/{offset}/{block_offset}:
|
||||
summary: Cached data for transactions since a particular block.
|
||||
description: Return cached data entries of transactions since a particular block. The block parameter is inclusive. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: txa.get.latest.range.block.offset
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/TransactionList"
|
||||
|
||||
parameters:
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: block_offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
/txa/{limit}/{offset}/{block_offset}/{block_end}:
|
||||
summary: Cached data for transactions within a particular block range.
|
||||
description: Return cached data entries of transactions within a particular block range in the cache. The block parameters are inclusive. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: txa.get.latest.range.block.range
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/TransactionList"
|
||||
|
||||
parameters:
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: block_offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: block_end
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
/txa/{address}:
|
||||
summary: Cached data for batch of latest transactions by account.
|
||||
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: txa.get.user
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/TransactionList"
|
||||
|
||||
parameters:
|
||||
- name: address
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
|
||||
|
||||
/txa/{address}/{limit}:
|
||||
summary: Cached data for batch of latest transactions by account.
|
||||
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: txa.get.user.limit
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/TransactionList"
|
||||
|
||||
parameters:
|
||||
- name: address
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
/txa/{address}/{limit}/{offset}:
|
||||
summary: Cached data for batch of latest transactions by account.
|
||||
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
|
||||
get:
|
||||
tags:
|
||||
- transactions
|
||||
description:
|
||||
Retrieve transactions
|
||||
operationId: txa.get.user.range
|
||||
responses:
|
||||
200:
|
||||
description: Transaction query successful.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/TransactionList"
|
||||
|
||||
parameters:
|
||||
- name: address
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: limit
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
- name: offset
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
|
||||
|
||||
components:
|
||||
schemas:
|
||||
Limit:
|
||||
type: integer
|
||||
format: int32
|
||||
BlocksBloom:
|
||||
type: object
|
||||
properties:
|
||||
low:
|
||||
type: int
|
||||
type: integer
|
||||
format: int32
|
||||
description: The lowest block number included in the filter
|
||||
high:
|
||||
type: integer
|
||||
format: int32
|
||||
description: The highest block number included in the filter
|
||||
block_filter:
|
||||
type: string
|
||||
format: byte
|
||||
@ -97,6 +555,89 @@ components:
|
||||
type: string
|
||||
description: Hashing algorithm (currently only using sha256)
|
||||
filter_rounds:
|
||||
type: int
|
||||
type: integer
|
||||
format: int32
|
||||
description: Number of hash rounds used to create the filter
|
||||
TransactionList:
|
||||
type: object
|
||||
properties:
|
||||
low:
|
||||
type: integer
|
||||
format: int32
|
||||
description: The lowest block number included in the result set
|
||||
high:
|
||||
type: integer
|
||||
format: int32
|
||||
description: The highest block number included in the filter
|
||||
data:
|
||||
type: array
|
||||
description: Cached transaction data
|
||||
items:
|
||||
$ref: "#/components/schemas/Transaction"
|
||||
Transaction:
|
||||
type: object
|
||||
properties:
|
||||
block_number:
|
||||
type: integer
|
||||
format: int64
|
||||
description: Block number transaction was included in.
|
||||
tx_hash:
|
||||
type: string
|
||||
description: Transaction hash, in hex.
|
||||
date_block:
|
||||
type: integer
|
||||
format: int32
|
||||
description: Block timestamp.
|
||||
sender:
|
||||
type: string
|
||||
description: Spender address, in hex.
|
||||
recipient:
|
||||
type: string
|
||||
description: Beneficiary address, in hex.
|
||||
from_value:
|
||||
type: integer
|
||||
format: int64
|
||||
description: Value deducted from spender's balance.
|
||||
to_value:
|
||||
type: integer
|
||||
format: int64
|
||||
description: Value added to beneficiary's balance.
|
||||
source_token:
|
||||
type: string
|
||||
description: Network address of token in which `from_value` is denominated.
|
||||
destination_token:
|
||||
type: string
|
||||
description: Network address of token in which `to_value` is denominated.
|
||||
success:
|
||||
type: boolean
|
||||
description: Network consensus state on whether the transaction was successful or not.
|
||||
tx_type:
|
||||
type: string
|
||||
enum:
|
||||
- erc20.faucet
|
||||
- faucet.give_to
|
||||
|
||||
examples:
|
||||
data_last:
|
||||
summary: Get the latest cached transactions, using the server's default limit.
|
||||
value: "/txa"
|
||||
|
||||
data_limit:
|
||||
summary: Get the last 42 cached transactions.
|
||||
value: "/txa/42"
|
||||
|
||||
data_range:
|
||||
summary: Get the next 42 cached transactions, starting from the 13th (zero-indexed).
|
||||
value: "/txa/42/13"
|
||||
|
||||
data_range_block_offset:
|
||||
summary: Get the next 42 cached transactions, starting from block 1337 (inclusive).
|
||||
value: "/txa/42/0/1337"
|
||||
|
||||
data_range_block_range_no_offset:
|
||||
summary: Get the next 42 cached transactions within blocks 1337 and 1453 (inclusive).
|
||||
value: "/txa/42/0/1337/1453"
|
||||
|
||||
data_range_block_range:
|
||||
summary: Get the next 42 cached transactions after the 13th, within blocks 1337 and 1453 (inclusive).
|
||||
value: "/txa/42/13/1337/1453"
|
||||
|
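The endpoints documented above can be exercised with nothing more than the standard library once the daemon is reachable; the host and port below are assumptions for illustration, not something this changeset defines:

    import json
    import urllib.request

    base = 'http://localhost:5555'  # assumed address of a running cic-cache daemon

    # server-wide default result set size
    with urllib.request.urlopen(base + '/defaultlimit') as rsp:
        print('default limit:', json.load(rsp))

    # bloom filter over the 10 latest transactions
    with urllib.request.urlopen(base + '/tx/10') as rsp:
        bloom = json.load(rsp)
        print('blocks', bloom['low'], '-', bloom['high'])

    # cached data for the 10 latest transactions, no result offset
    with urllib.request.urlopen(base + '/txa/10/0') as rsp:
        print(len(json.load(rsp)['data']), 'transactions')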
@ -4,9 +4,9 @@ FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
|
||||
|
||||
COPY requirements.txt .
|
||||
|
||||
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net"
|
||||
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net
|
||||
ARG EXTRA_PIP_ARGS=""
|
||||
ARG PIP_INDEX_URL="https://pypi.org/simple"
|
||||
ARG PIP_INDEX_URL=https://pypi.org/simple
|
||||
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
pip install --index-url $PIP_INDEX_URL \
|
||||
@ -14,14 +14,9 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||
-r requirements.txt
|
||||
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN python setup.py install
|
||||
|
||||
# ini files in config directory defines the configurable parameters for the application
|
||||
# they can all be overridden by environment variables
|
||||
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||
#COPY config/ /usr/local/etc/cic-cache/
|
||||
RUN pip install . --extra-index-url $EXTRA_PIP_INDEX_URL
|
||||
|
||||
# for db migrations
|
||||
COPY ./aux/wait-for-it/wait-for-it.sh ./
|
||||
|
@ -2,5 +2,5 @@
|
||||
|
||||
set -e
|
||||
>&2 echo executing database migration
|
||||
python scripts/migrate.py --migrations-dir /usr/local/share/cic-cache/alembic -vv
|
||||
python scripts/migrate_cic_cache.py --migrations-dir /usr/local/share/cic-cache/alembic -vv
|
||||
set +e
|
||||
|
@@ -1,14 +1,15 @@
alembic==1.4.2
confini>=0.3.6rc4,<0.5.0
confini~=0.5.3
uwsgi==2.0.19.1
moolb~=0.1.1b2
cic-eth-registry~=0.6.1a1
moolb~=0.2.0
cic-eth-registry~=0.6.6
SQLAlchemy==1.3.20
semver==2.13.0
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
chainsyncer[sql]>=0.0.6a3,<0.1.0
erc20-faucet>=0.3.2a2, <0.4.0
chainlib-eth>=0.0.9a14,<0.1.0
eth-address-index>=0.2.3a4,<0.3.0
chainsyncer[sql]~=0.0.7
erc20-faucet~=0.3.2
chainlib-eth~=0.0.15
eth-address-index~=0.2.4
okota~=0.2.5
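Most of the pins above move from explicit `>=,<` ranges to compatible-release (`~=`) specifiers, which express the same upper bound more compactly. A quick check of what `~=` accepts, using the third-party `packaging` library; it is not a dependency of this repo and is pulled in here only for illustration:

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet('~=0.0.7')   # equivalent to >=0.0.7,==0.0.*
    print(spec.contains('0.0.7'))    # True
    print(spec.contains('0.0.12'))   # True
    print(spec.contains('0.1.0'))    # False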
@ -1,54 +1,55 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
|
||||
# standard imports
|
||||
import os
|
||||
import argparse
|
||||
import logging
|
||||
import re
|
||||
|
||||
# external imports
|
||||
import alembic
|
||||
from alembic.config import Config as AlembicConfig
|
||||
import confini
|
||||
|
||||
# local imports
|
||||
from cic_cache.db import dsn_from_config
|
||||
import cic_cache.cli
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
# BUG: the dbdir doesn't work after script install
|
||||
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(cic_cache.__file__)))
|
||||
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
|
||||
migrationsdir = os.path.join(dbdir, 'migrations')
|
||||
default_migrations_dir = os.path.join(dbdir, 'migrations')
|
||||
configdir = os.path.join(rootdir, 'cic_cache', 'data', 'config')
|
||||
|
||||
#config_dir = os.path.join('/usr/local/etc/cic-cache')
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, help='config file')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
|
||||
arg_flags = cic_cache.cli.argflag_std_base
|
||||
local_arg_flags = cic_cache.cli.argflag_local_sync
|
||||
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
|
||||
argparser.add_argument('-f', action='store_true', help='force action')
|
||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||
argparser.add_argument('-f', '--force', action='store_true', help='force action')
|
||||
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=default_migrations_dir, type=str, help='migrations directory')
|
||||
args = argparser.parse_args()
|
||||
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
extra_args = {
|
||||
'reset': None,
|
||||
'force': None,
|
||||
'migrations_dir': None,
|
||||
}
|
||||
# process config
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)
|
||||
|
||||
config = confini.Config(configdir, args.env_prefix)
|
||||
config.process()
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config:\n{}'.format(config))
|
||||
|
||||
migrations_dir = os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE'))
|
||||
migrations_dir = os.path.join(config.get('_MIGRATIONS_DIR'), config.get('DATABASE_ENGINE', 'default'))
|
||||
if not os.path.isdir(migrations_dir):
|
||||
logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
|
||||
migrations_dir = os.path.join(args.migrations_dir, 'default')
|
||||
|
||||
# connect to database
|
||||
dsn = dsn_from_config(config)
|
||||
dsn = dsn_from_config(config, 'cic_cache')
|
||||
|
||||
|
||||
logg.info('using migrations dir {}'.format(migrations_dir))
|
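The rewritten migration script resolves a per-engine migrations directory and falls back to `default` when no engine-specific one exists. The fallback itself is just a path check, sketched here with throwaway directories so it runs anywhere:

    import os
    import tempfile

    migrations_root = tempfile.mkdtemp()
    os.makedirs(os.path.join(migrations_root, 'default'))
    # note: no 'postgresql' subdirectory is created

    engine = 'postgresql'
    migrations_dir = os.path.join(migrations_root, engine)
    if not os.path.isdir(migrations_dir):
        # same fallback the migration script performs
        migrations_dir = os.path.join(migrations_root, 'default')

    print(migrations_dir)  # points at the 'default' subdirectory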
@ -1,6 +1,7 @@
|
||||
[metadata]
|
||||
name = cic-cache
|
||||
description = CIC Cache API and server
|
||||
version = 0.3.0a2
|
||||
author = Louis Holbrook
|
||||
author_email = dev@holbrook.no
|
||||
url = https://gitlab.com/grassrootseconomics/cic-eth
|
||||
@ -34,7 +35,7 @@ packages =
|
||||
cic_cache.runnable.daemons
|
||||
cic_cache.runnable.daemons.filters
|
||||
scripts =
|
||||
./scripts/migrate.py
|
||||
./scripts/migrate_cic_cache.py
|
||||
|
||||
[options.entry_points]
|
||||
console_scripts =
|
||||
|
@ -1,38 +1,39 @@
|
||||
from setuptools import setup
|
||||
|
||||
import configparser
|
||||
# import configparser
|
||||
import os
|
||||
import time
|
||||
|
||||
from cic_cache.version import (
|
||||
version_object,
|
||||
version_string
|
||||
)
|
||||
# import time
|
||||
|
||||
class PleaseCommitFirstError(Exception):
|
||||
pass
|
||||
|
||||
def git_hash():
|
||||
import subprocess
|
||||
git_diff = subprocess.run(['git', 'diff'], capture_output=True)
|
||||
if len(git_diff.stdout) > 0:
|
||||
raise PleaseCommitFirstError()
|
||||
git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
|
||||
git_hash_brief = git_hash.stdout.decode('utf-8')[:8]
|
||||
return git_hash_brief
|
||||
|
||||
version_string = str(version_object)
|
||||
|
||||
try:
|
||||
version_git = git_hash()
|
||||
version_string += '+build.{}'.format(version_git)
|
||||
except FileNotFoundError:
|
||||
time_string_pair = str(time.time()).split('.')
|
||||
version_string += '+build.{}{:<09d}'.format(
|
||||
time_string_pair[0],
|
||||
int(time_string_pair[1]),
|
||||
)
|
||||
print('final version string will be {}'.format(version_string))
|
||||
# from cic_cache.version import (
|
||||
# version_object,
|
||||
# version_string
|
||||
# )
|
||||
#
|
||||
# class PleaseCommitFirstError(Exception):
|
||||
# pass
|
||||
#
|
||||
# def git_hash():
|
||||
# import subprocess
|
||||
# git_diff = subprocess.run(['git', 'diff'], capture_output=True)
|
||||
# if len(git_diff.stdout) > 0:
|
||||
# raise PleaseCommitFirstError()
|
||||
# git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
|
||||
# git_hash_brief = git_hash.stdout.decode('utf-8')[:8]
|
||||
# return git_hash_brief
|
||||
#
|
||||
# version_string = str(version_object)
|
||||
#
|
||||
# try:
|
||||
# version_git = git_hash()
|
||||
# version_string += '+build.{}'.format(version_git)
|
||||
# except FileNotFoundError:
|
||||
# time_string_pair = str(time.time()).split('.')
|
||||
# version_string += '+build.{}{:<09d}'.format(
|
||||
# time_string_pair[0],
|
||||
# int(time_string_pair[1]),
|
||||
# )
|
||||
# print('final version string will be {}'.format(version_string))
|
||||
|
||||
requirements = []
|
||||
f = open('requirements.txt', 'r')
|
||||
@ -52,9 +53,8 @@ while True:
|
||||
test_requirements.append(l.rstrip())
|
||||
f.close()
|
||||
|
||||
|
||||
setup(
|
||||
version=version_string,
|
||||
# version=version_string,
|
||||
install_requires=requirements,
|
||||
tests_require=test_requirements,
|
||||
)
|
||||
|
@ -7,4 +7,4 @@ pytest-celery==0.0.0a1
|
||||
eth_tester==0.5.0b3
|
||||
py-evm==0.3.0a20
|
||||
sarafu-faucet~=0.0.7a1
|
||||
erc20-transfer-authorization>=0.3.5a1,<0.4.0
|
||||
erc20-transfer-authorization~=0.3.6
|
||||
|
@ -6,6 +6,7 @@ import datetime
|
||||
# external imports
|
||||
import pytest
|
||||
import moolb
|
||||
from chainlib.encode import TxHexNormalizer
|
||||
|
||||
# local imports
|
||||
from cic_cache import db
|
||||
@ -42,6 +43,8 @@ def txs(
|
||||
list_tokens,
|
||||
):
|
||||
|
||||
tx_normalize = TxHexNormalizer()
|
||||
|
||||
session = init_database
|
||||
|
||||
tx_number = 13
|
||||
@ -54,10 +57,10 @@ def txs(
|
||||
tx_hash_first,
|
||||
list_defaults['block'],
|
||||
tx_number,
|
||||
list_actors['alice'],
|
||||
list_actors['bob'],
|
||||
list_tokens['foo'],
|
||||
list_tokens['foo'],
|
||||
tx_normalize.wallet_address(list_actors['alice']),
|
||||
tx_normalize.wallet_address(list_actors['bob']),
|
||||
tx_normalize.executable_address(list_tokens['foo']),
|
||||
tx_normalize.executable_address(list_tokens['foo']),
|
||||
1024,
|
||||
2048,
|
||||
True,
|
||||
@ -74,10 +77,10 @@ def txs(
|
||||
tx_hash_second,
|
||||
list_defaults['block']-1,
|
||||
tx_number,
|
||||
list_actors['diane'],
|
||||
list_actors['alice'],
|
||||
list_tokens['foo'],
|
||||
list_tokens['foo'],
|
||||
tx_normalize.wallet_address(list_actors['diane']),
|
||||
tx_normalize.wallet_address(list_actors['alice']),
|
||||
tx_normalize.executable_address(list_tokens['foo']),
|
||||
tx_normalize.wallet_address(list_tokens['foo']),
|
||||
1024,
|
||||
2048,
|
||||
False,
|
||||
@ -103,6 +106,8 @@ def more_txs(
|
||||
|
||||
session = init_database
|
||||
|
||||
tx_normalize = TxHexNormalizer()
|
||||
|
||||
tx_number = 666
|
||||
tx_hash = '0x' + os.urandom(32).hex()
|
||||
tx_signed = '0x' + os.urandom(128).hex()
|
||||
@ -115,10 +120,10 @@ def more_txs(
|
||||
tx_hash,
|
||||
list_defaults['block']+2,
|
||||
tx_number,
|
||||
list_actors['alice'],
|
||||
list_actors['diane'],
|
||||
list_tokens['bar'],
|
||||
list_tokens['bar'],
|
||||
tx_normalize.wallet_address(list_actors['alice']),
|
||||
tx_normalize.wallet_address(list_actors['diane']),
|
||||
tx_normalize.executable_address(list_tokens['bar']),
|
||||
tx_normalize.executable_address(list_tokens['bar']),
|
||||
2048,
|
||||
4096,
|
||||
False,
|
||||
|
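The fixture now stores normalized addresses, matching what the cache layer writes elsewhere in this changeset. Assuming `chainlib` is installed, the normalizer can be poked at directly; only the two methods used in this diff are exercised, the input is a bare (un-prefixed) hex address as in query.py, and the exact output format is left to the library:

    import os
    from chainlib.encode import TxHexNormalizer

    tx_normalize = TxHexNormalizer()
    address = os.urandom(20).hex()

    # same calls the fixture makes for actors and tokens
    print(tx_normalize.wallet_address(address))
    print(tx_normalize.executable_address(address))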
@ -14,7 +14,8 @@ logg = logging.getLogger(__file__)
|
||||
@pytest.fixture(scope='session')
|
||||
def load_config():
|
||||
config_dir = os.path.join(root_dir, 'config/test')
|
||||
conf = confini.Config(config_dir, 'CICTEST')
|
||||
schema_config_dir = os.path.join(root_dir, 'cic_cache', 'data', 'config')
|
||||
conf = confini.Config(schema_config_dir, 'CICTEST', override_dirs=config_dir)
|
||||
conf.process()
|
||||
logg.debug('config {}'.format(conf))
|
||||
return conf
|
||||
|
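`override_dirs` makes the packaged schema defaults the base layer and lets the test directory override individual values. The same layering can be illustrated with plain `configparser`, reading the schema file first and the override file second; the paths below are temporary stand-ins, not the repo's real config:

    import configparser
    import os
    import tempfile

    d = tempfile.mkdtemp()
    with open(os.path.join(d, 'schema.ini'), 'w') as f:
        f.write('[database]\nname = cic_cache\nhost = localhost\n')
    with open(os.path.join(d, 'override.ini'), 'w') as f:
        f.write('[database]\nname = cic-cache-test\n')

    c = configparser.ConfigParser()
    # later files win, which is the effect override_dirs gives confini
    c.read([os.path.join(d, 'schema.ini'), os.path.join(d, 'override.ini')])
    print(c['database']['name'])  # cic-cache-test
    print(c['database']['host'])  # localhost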
@ -24,11 +24,15 @@ def database_engine(
|
||||
if load_config.get('DATABASE_ENGINE') == 'sqlite':
|
||||
SessionBase.transactional = False
|
||||
SessionBase.poolable = False
|
||||
name = 'cic_cache'
|
||||
database_name = name
|
||||
if load_config.get('DATABASE_PREFIX'):
|
||||
database_name = '{}_{}'.format(load_config.get('DATABASE_PREFIX'), database_name)
|
||||
try:
|
||||
os.unlink(load_config.get('DATABASE_NAME'))
|
||||
os.unlink(database_name)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
dsn = dsn_from_config(load_config)
|
||||
dsn = dsn_from_config(load_config, name)
|
||||
SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
|
||||
return dsn
|
||||
|
||||
|
@ -14,7 +14,7 @@ def test_api_all_data(
|
||||
):
|
||||
|
||||
env = {
|
||||
'PATH_INFO': '/txa/410000/420000',
|
||||
'PATH_INFO': '/txa/100/0/410000/420000',
|
||||
'HTTP_X_CIC_CACHE_MODE': 'all',
|
||||
}
|
||||
j = process_transactions_all_data(init_database, env)
|
||||
@ -23,7 +23,7 @@ def test_api_all_data(
|
||||
assert len(o['data']) == 2
|
||||
|
||||
env = {
|
||||
'PATH_INFO': '/txa/420000/410000',
|
||||
'PATH_INFO': '/txa/100/0/420000/410000',
|
||||
'HTTP_X_CIC_CACHE_MODE': 'all',
|
||||
}
|
||||
|
||||
|
@ -6,6 +6,7 @@ import json
|
||||
|
||||
# external imports
|
||||
import pytest
|
||||
from chainlib.encode import TxHexNormalizer
|
||||
|
||||
# local imports
|
||||
from cic_cache import db
|
||||
@ -62,6 +63,8 @@ def test_cache_ranges(
|
||||
|
||||
session = init_database
|
||||
|
||||
tx_normalize = TxHexNormalizer()
|
||||
|
||||
oldest = list_defaults['block'] - 1
|
||||
mid = list_defaults['block']
|
||||
newest = list_defaults['block'] + 2
|
||||
@ -100,32 +103,39 @@ def test_cache_ranges(
|
||||
assert b[1] == mid
|
||||
|
||||
# now check when supplying account
|
||||
b = c.load_transactions_account(list_actors['alice'], 0, 100)
|
||||
account = tx_normalize.wallet_address(list_actors['alice'])
|
||||
b = c.load_transactions_account(account, 0, 100)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions_account(list_actors['bob'], 0, 100)
|
||||
account = tx_normalize.wallet_address(list_actors['bob'])
|
||||
b = c.load_transactions_account(account, 0, 100)
|
||||
assert b[0] == mid
|
||||
assert b[1] == mid
|
||||
|
||||
b = c.load_transactions_account(list_actors['diane'], 0, 100)
|
||||
account = tx_normalize.wallet_address(list_actors['diane'])
|
||||
b = c.load_transactions_account(account, 0, 100)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
|
||||
# add block filter to the mix
|
||||
b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||
account = tx_normalize.wallet_address(list_actors['alice'])
|
||||
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||
account = tx_normalize.wallet_address(list_actors['alice'])
|
||||
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions_account(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
account = tx_normalize.wallet_address(list_actors['bob'])
|
||||
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == mid
|
||||
|
||||
b = c.load_transactions_account(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
account = tx_normalize.wallet_address(list_actors['diane'])
|
||||
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
assert b[0] == oldest
|
||||
assert b[1] == oldest
|
||||
|
||||
@ -140,6 +150,8 @@ def test_cache_ranges_data(
|
||||
|
||||
session = init_database
|
||||
|
||||
tx_normalize = TxHexNormalizer()
|
||||
|
||||
oldest = list_defaults['block'] - 1
|
||||
mid = list_defaults['block']
|
||||
newest = list_defaults['block'] + 2
|
||||
@ -203,7 +215,8 @@ def test_cache_ranges_data(
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
|
||||
# now check when supplying account
|
||||
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100)
|
||||
account = tx_normalize.wallet_address(list_actors['alice'])
|
||||
b = c.load_transactions_account_with_data(account, 0, 100)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 3
|
||||
@ -211,13 +224,15 @@ def test_cache_ranges_data(
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
assert b[2][2]['tx_hash'] == more_txs[2]
|
||||
|
||||
b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100)
|
||||
account = tx_normalize.wallet_address(list_actors['bob'])
|
||||
b = c.load_transactions_account_with_data(account, 0, 100)
|
||||
assert b[0] == mid
|
||||
assert b[1] == mid
|
||||
assert len(b[2]) == 1
|
||||
assert b[2][0]['tx_hash'] == more_txs[1]
|
||||
|
||||
b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100)
|
||||
account = tx_normalize.wallet_address(list_actors['diane'])
|
||||
b = c.load_transactions_account_with_data(account, 0, 100)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 2
|
||||
@ -225,27 +240,31 @@ def test_cache_ranges_data(
|
||||
assert b[2][1]['tx_hash'] == more_txs[2]
|
||||
|
||||
# add block filter to the mix
|
||||
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||
account = tx_normalize.wallet_address(list_actors['alice'])
|
||||
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
|
||||
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||
account = tx_normalize.wallet_address(list_actors['alice'])
|
||||
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
|
||||
b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
account = tx_normalize.wallet_address(list_actors['bob'])
|
||||
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == mid
|
||||
assert len(b[2]) == 1
|
||||
assert b[2][0]['tx_hash'] == more_txs[1]
|
||||
|
||||
b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
account = tx_normalize.wallet_address(list_actors['diane'])
|
||||
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
assert b[0] == oldest
|
||||
assert b[1] == oldest
|
||||
assert len(b[2]) == 1
|
||||
|
@ -82,7 +82,7 @@ def test_query_regex(
|
||||
[
|
||||
('alice', None, None, [(420000, 13), (419999, 42)]),
|
||||
('alice', None, 1, [(420000, 13)]),
|
||||
('alice', 1, None, [(419999, 42)]), # 420000 == list_defaults['block']
|
||||
('alice', 1, 1, [(419999, 42)]), # 420000 == list_defaults['block']
|
||||
('alice', 2, None, []), # 420000 == list_defaults['block']
|
||||
],
|
||||
)
|
||||
@ -107,10 +107,11 @@ def test_query_process_txs_account(
|
||||
path_info = '/tx/user/0x' + strip_0x(actor)
|
||||
if query_offset != None:
|
||||
path_info += '/' + str(query_offset)
|
||||
if query_limit != None:
|
||||
if query_limit == None:
|
||||
query_limit = 100
|
||||
path_info += '/' + str(query_limit)
|
||||
if query_offset == None:
|
||||
path_info += '/0'
|
||||
path_info += '/' + str(query_limit)
|
||||
env = {
|
||||
'PATH_INFO': path_info,
|
||||
}
|
||||
@ -192,7 +193,7 @@ def test_query_process_txs_bloom(
|
||||
@pytest.mark.parametrize(
|
||||
'query_block_start, query_block_end, query_match_count',
|
||||
[
|
||||
(None, 42, 0),
|
||||
(1, 42, 0),
|
||||
(420000, 420001, 1),
|
||||
(419999, 419999, 1), # matches are inclusive
|
||||
(419999, 420000, 2),
|
||||
@ -211,7 +212,7 @@ def test_query_process_txs_data(
|
||||
query_match_count,
|
||||
):
|
||||
|
||||
path_info = '/txa'
|
||||
path_info = '/txa/100/0'
|
||||
if query_block_start != None:
|
||||
path_info += '/' + str(query_block_start)
|
||||
if query_block_end != None:
|
||||
@ -227,4 +228,5 @@ def test_query_process_txs_data(
|
||||
assert r != None
|
||||
|
||||
o = json.loads(r[1])
|
||||
logg.debug('oo {}'.format(o))
|
||||
assert len(o['data']) == query_match_count
|
||||
|
@ -1,5 +1,5 @@
|
||||
celery==4.4.7
|
||||
erc20-demurrage-token~=0.0.5a3
|
||||
cic-eth-registry~=0.6.1a6
|
||||
chainlib~=0.0.9rc1
|
||||
cic_eth~=0.12.4a11
|
||||
erc20-demurrage-token~=0.0.6
|
||||
cic-eth-registry~=0.6.3
|
||||
chainlib~=0.0.14
|
||||
cic_eth~=0.12.6
|
||||
|
@ -1,6 +1,6 @@
|
||||
[metadata]
|
||||
name = cic-eth-aux-erc20-demurrage-token
|
||||
version = 0.0.2a7
|
||||
version = 0.0.3
|
||||
description = cic-eth tasks supporting erc20 demurrage token
|
||||
author = Louis Holbrook
|
||||
author_email = dev@holbrook.no
|
||||
|
@ -1,5 +1,4 @@
|
||||
SQLAlchemy==1.3.20
|
||||
cic-eth-registry>=0.6.1a6,<0.7.0
|
||||
hexathon~=0.0.1a8
|
||||
chainqueue>=0.0.4a6,<0.1.0
|
||||
eth-erc20>=0.1.2a2,<0.2.0
|
||||
hexathon~=0.1.0
|
||||
chainqueue~=0.0.6a4
|
||||
eth-erc20~=0.1.5
|
||||
|
@ -63,22 +63,32 @@ class Config(BaseConfig):
|
||||
config.get('REDIS_HOST'),
|
||||
config.get('REDIS_PORT'),
|
||||
)
|
||||
db = getattr(args, 'redis_db', None)
|
||||
if db != None:
|
||||
db = str(db)
|
||||
|
||||
redis_url = (
|
||||
'redis',
|
||||
hostport,
|
||||
getattr(args, 'redis_db', None),
|
||||
db,
|
||||
)
|
||||
|
||||
|
||||
celery_config_url = urllib.parse.urlsplit(config.get('CELERY_BROKER_URL'))
|
||||
hostport = urlhostmerge(
|
||||
celery_config_url[1],
|
||||
getattr(args, 'celery_host', None),
|
||||
getattr(args, 'celery_port', None),
|
||||
)
|
||||
db = getattr(args, 'redis_db', None)
|
||||
if db != None:
|
||||
db = str(db)
|
||||
celery_arg_url = (
|
||||
getattr(args, 'celery_scheme', None),
|
||||
hostport,
|
||||
getattr(args, 'celery_db', None),
|
||||
db,
|
||||
)
|
||||
|
||||
celery_url = urlmerge(redis_url, celery_config_url, celery_arg_url)
|
||||
celery_url_string = urllib.parse.urlunsplit(celery_url)
|
||||
local_celery_args_override['CELERY_BROKER_URL'] = celery_url_string
|
||||
|
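The Celery broker URL is now assembled by layering the Redis defaults, the config-file value and any command-line overrides before `urlunsplit` puts the pieces back together. The stdlib split/unsplit round trip underneath that is worth seeing on its own; the URL is an example value, and `urlhostmerge`/`urlmerge` come from the urlybird dependency and are not shown here:

    import urllib.parse

    url = urllib.parse.urlsplit('redis://localhost:63379/0')
    print(url.scheme, url.netloc, url.path)  # redis localhost:63379 /0

    # swap in an overridden host:port and database index, keep the scheme
    merged = (url.scheme, 'redis:6379', '/2', '', '')
    print(urllib.parse.urlunsplit(merged))   # redis://redis:6379/2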
@ -22,7 +22,7 @@ from hexathon import (
|
||||
from chainqueue.error import NotLocalTxError
|
||||
from eth_erc20 import ERC20
|
||||
from chainqueue.sql.tx import cache_tx_dict
|
||||
from okota.token_index import to_identifier
|
||||
from okota.token_index.index import to_identifier
|
||||
|
||||
# local imports
|
||||
from cic_eth.db.models.base import SessionBase
|
||||
@ -46,13 +46,14 @@ from cic_eth.task import (
|
||||
from cic_eth.eth.nonce import CustodialTaskNonceOracle
|
||||
from cic_eth.encode import tx_normalize
|
||||
from cic_eth.eth.trust import verify_proofs
|
||||
from cic_eth.error import SignerError
|
||||
|
||||
celery_app = celery.current_app
|
||||
logg = logging.getLogger()
|
||||
|
||||
|
||||
@celery_app.task(base=CriticalWeb3Task)
|
||||
def balance(tokens, holder_address, chain_spec_dict):
|
||||
@celery_app.task(bind=True, base=CriticalWeb3Task)
|
||||
def balance(self, tokens, holder_address, chain_spec_dict):
|
||||
"""Return token balances for a list of tokens for given address
|
||||
|
||||
:param tokens: Token addresses
|
||||
@ -71,8 +72,9 @@ def balance(tokens, holder_address, chain_spec_dict):
|
||||
for t in tokens:
|
||||
address = t['address']
|
||||
logg.debug('address {} {}'.format(address, holder_address))
|
||||
gas_oracle = self.create_gas_oracle(rpc, min_price=self.min_fee_price)
|
||||
token = ERC20Token(chain_spec, rpc, add_0x(address))
|
||||
c = ERC20(chain_spec)
|
||||
c = ERC20(chain_spec, gas_oracle=gas_oracle)
|
||||
o = c.balance_of(address, holder_address, sender_address=caller_address)
|
||||
r = rpc.do(o)
|
||||
t['balance_network'] = c.parse_balance(r)
|
||||
|
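`balance` becomes a bound task (`bind=True`), which is what gives it access to `self.create_gas_oracle` and the class-level `min_fee_price`. The bound-task mechanics in isolation, assuming Celery is installed; the broker URL is a dummy because the task is only run eagerly:

    import celery

    celery_app = celery.Celery('example', broker='memory://')

    @celery_app.task(bind=True)
    def shout(self, word):
        # self is the task instance, so class-level attributes are reachable
        return '{} says {}'.format(self.name, word)

    print(shout.apply(args=('hello',)).get())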
@ -92,7 +92,7 @@ def apply_gas_value_cache_local(address, method, value, tx_hash, session=None):
|
||||
|
||||
if o == None:
|
||||
o = GasCache(address, method, value, tx_hash)
|
||||
elif tx.gas_used > o.value:
|
||||
elif value > o.value:
|
||||
o.value = value
|
||||
o.tx_hash = strip_0x(tx_hash)
|
||||
|
||||
|
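The corrected comparison means the cached gas value is only replaced by a larger observation, so the cache keeps a running maximum per entry. Reduced to its essentials:

    # running-maximum update, mirroring apply_gas_value_cache_local after the fix
    cached = None
    for value in (21000, 65000, 30000):
        if cached is None or value > cached:
            cached = value
    print(cached)  # 65000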
@ -25,12 +25,14 @@ logg = logging.getLogger()
celery_app = celery.current_app


class BaseTask(celery.Task):

session_func = SessionBase.create_session
call_address = ZERO_ADDRESS
trusted_addresses = []
min_fee_price = 1
min_fee_limit = 30000
default_token_address = None
default_token_symbol = None
default_token_name = None

@ -42,7 +44,7 @@ class BaseTask(celery.Task):
if address == None:
return RPCGasOracle(
conn,
code_callback=kwargs.get('code_callback'),
code_callback=kwargs.get('code_callback', self.get_min_fee_limit),
min_price=self.min_fee_price,
id_generator=kwargs.get('id_generator'),
)

@ -56,6 +58,10 @@ class BaseTask(celery.Task):
)


def get_min_fee_limit(self, code):
return self.min_fee_limit


def create_session(self):
return BaseTask.session_func()
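With these defaults, a gas oracle created by a task falls back to get_min_fee_limit when no code_callback is supplied, and never quotes a price below min_fee_price. A compact sketch of that intent (not chainlib's RPCGasOracle, just the selection rule):

def choose_fee(rpc_price, code, min_price=1, code_callback=None):
    # clamp the node-reported price to a floor and let the callback pick the fee limit
    price = max(rpc_price, min_price)
    limit = code_callback(code) if code_callback is not None else 21000
    return price, limit

# mirrors the BaseTask defaults above: min_fee_price=1, min_fee_limit=30000
assert choose_fee(0, b'', min_price=1, code_callback=lambda code: 30000) == (1, 30000)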
@ -11,13 +11,6 @@ ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple

RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a7


COPY *requirements.txt ./
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \

@ -40,8 +33,6 @@ RUN chmod 755 *.sh
# # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/

# TODO this kind of code sharing across projects should be discouraged...can we make util a library?
#COPY util/liveness/health.sh /usr/local/bin/health.sh

@ -66,8 +57,7 @@ ENTRYPOINT []
## # they can all be overridden by environment variables
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/
#COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY scripts/ scripts/
#
## TODO this kind of code sharing across projects should be discouraged...can we make util a library?
@ -1,4 +1,7 @@
celery==4.4.7
chainlib-eth>=0.0.10a20,<0.1.0
semver==2.13.0
urlybird~=0.0.1a2
chainlib-eth~=0.0.15
urlybird~=0.0.1
cic-eth-registry~=0.6.6
cic-types~=0.2.1a8
cic-eth-aux-erc20-demurrage-token~=0.0.3
@ -1,16 +1,15 @@
chainqueue>=0.0.6a1,<0.1.0
chainsyncer[sql]>=0.0.7a3,<0.1.0
chainqueue~=0.0.6a4
chainsyncer[sql]~=0.0.7
alembic==1.4.2
confini>=0.3.6rc4,<0.5.0
confini~=0.5.3
redis==3.5.3
hexathon~=0.0.1a8
hexathon~=0.1.0
pycryptodome==3.10.1
liveness~=0.0.1a7
eth-address-index>=0.2.4a1,<0.3.0
eth-accounts-index>=0.1.2a3,<0.2.0
cic-eth-registry>=0.6.1a6,<0.7.0
erc20-faucet>=0.3.2a2,<0.4.0
erc20-transfer-authorization>=0.3.5a2,<0.4.0
sarafu-faucet>=0.0.7a2,<0.1.0
moolb~=0.1.1b2
okota>=0.2.4a6,<0.3.0
eth-address-index~=0.2.4
eth-accounts-index~=0.1.2
erc20-faucet~=0.3.2
erc20-transfer-authorization~=0.3.6
sarafu-faucet~=0.0.7
moolb~=0.2.0
okota~=0.2.5
@ -1,7 +1,7 @@
[metadata]
name = cic-eth
#version = attr: cic_eth.version.__version_string__
version = 0.12.5a2
version = 0.12.7
description = CIC Network Ethereum interaction
author = Louis Holbrook
author_email = dev@holbrook.no

@ -6,4 +6,5 @@ pytest-redis==2.0.0
redis==3.5.3
eth-tester==0.5.0b3
py-evm==0.3.0a20
eth-erc20~=0.1.2a2
eth-erc20~=0.1.5
erc20-transfer-authorization~=0.3.6
@ -40,6 +40,7 @@ def test_filter_gas(
foo_token,
token_registry,
register_lookups,
register_tokens,
celery_session_worker,
cic_registry,
):

@ -69,7 +70,7 @@ def test_filter_gas(
tx = Tx(tx_src, block=block)
tx.apply_receipt(rcpt)
t = fltr.filter(eth_rpc, block, tx, db_session=init_database)
assert t == None
assert t.get() == None

nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc)
c = TokenUniqueSymbolIndex(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
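The assertion change suggests fltr.filter now hands back a result handle that has to be resolved with .get() before comparing, rather than a bare value. A toy illustration of the distinction (FakeAsyncResult is hypothetical, standing in for whatever handle the filter returns):

class FakeAsyncResult:
    def __init__(self, value):
        self._value = value

    def get(self):
        # resolving the handle yields the underlying return value
        return self._value

t = FakeAsyncResult(None)
assert t is not None       # the handle itself is never None
assert t.get() is None     # the resolved value is what the old assert meant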
@ -288,7 +288,6 @@ def test_fix_nonce(

init_database.commit()

logg.debug('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
txs = get_nonce_tx_local(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
ks = txs.keys()
assert len(ks) == 2

@ -191,11 +191,17 @@ def test_tokens(
break

api_param = str(uuid.uuid4())
fp = os.path.join(CallbackTask.mmap_path, api_param)
f = open(fp, 'wb+')
f.write(b'\x00')
f.close()

api = Api(str(default_chain_spec), queue=None, callback_param=api_param, callback_task='cic_eth.pytest.mock.callback.test_callback')
t = api.tokens(['BAR'], proof=[[bar_token_declaration]])
r = t.get()
logg.debug('rr {} {}'.format(r, t.children))


while True:
fp = os.path.join(CallbackTask.mmap_path, api_param)
try:
@ -35,10 +35,26 @@ from hexathon import strip_0x
from cic_eth.eth.gas import cache_gas_data
from cic_eth.error import OutOfGasError
from cic_eth.queue.tx import queue_create
from cic_eth.task import BaseTask

logg = logging.getLogger()


def test_task_gas_limit(
eth_rpc,
eth_signer,
default_chain_spec,
agent_roles,
celery_session_worker,
):
rpc = RPCConnection.connect(default_chain_spec, 'default')
gas_oracle = BaseTask().create_gas_oracle(rpc)
c = Gas(default_chain_spec, signer=eth_signer, gas_oracle=gas_oracle)
(tx_hash_hex, o) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 10, tx_format=TxFormat.RLP_SIGNED)
tx = unpack(bytes.fromhex(strip_0x(o)), default_chain_spec)
assert (tx['gas'], BaseTask.min_fee_price)


def test_task_check_gas_ok(
default_chain_spec,
eth_rpc,
@ -143,7 +143,7 @@ def test_incoming_balance(
'converters': [],
}
b = balance_incoming([token_data], recipient, default_chain_spec.asdict())
assert b[0]['balance_incoming'] == 0
assert b[0]['balance_incoming'] == 1000

otx.readysend(session=init_database)
init_database.flush()

@ -152,8 +152,8 @@ def test_incoming_balance(
otx.sent(session=init_database)
init_database.commit()

b = balance_incoming([token_data], recipient, default_chain_spec.asdict())
assert b[0]['balance_incoming'] == 1000
#b = balance_incoming([token_data], recipient, default_chain_spec.asdict())
#assert b[0]['balance_incoming'] == 1000

otx.success(block=1024, session=init_database)
init_database.commit()
@ -1,7 +1,5 @@
crypto-dev-signer>=0.4.15rc2,<=0.4.15
chainqueue>=0.0.5a3,<0.1.0
cic-eth-registry>=0.6.1a6,<0.7.0
chainqueue~=0.0.6a4
redis==3.5.3
hexathon~=0.0.1a8
hexathon~=0.1.0
pycryptodome==3.10.1
pyxdg==0.27

@ -9,7 +9,7 @@ import semver

logg = logging.getLogger()

version = (0, 4, 0, 'alpha.11')
version = (0, 4, 0, 'alpha.12')

version_object = semver.VersionInfo(
major=version[0],

@ -1,4 +1,4 @@
confini>=0.3.6rc4,<0.5.0
confini~=0.5.1
africastalking==1.2.3
SQLAlchemy==1.3.20
alembic==1.4.2
@ -1,7 +1,7 @@
# standard imports
import semver

version = (0, 3, 1, 'alpha.6')
version = (0, 3, 1, 'alpha.7')

version_object = semver.VersionInfo(
major=version[0],

@ -14,13 +14,6 @@ ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple

RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a7


COPY *requirements.txt ./
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
@ -4,10 +4,11 @@ billiard==3.6.4.0
bcrypt==3.2.0
celery==4.4.7
cffi==1.14.6
cic-eth~=0.12.5a1
cic-notify~=0.4.0a11
cic-types~=0.2.1a7
confini>=0.3.6rc4,<0.5.0
cic-eth~=0.12.6
cic-notify~=0.4.0a12
cic-types~=0.2.1a8
confini~=0.5.2
cic-eth-aux-erc20-demurrage-token~=0.0.3
phonenumbers==8.12.12
psycopg2==2.8.6
python-i18n[YAML]==0.3.9
@ -41,17 +41,17 @@ add_pending_tx_hash $r


# Deploy transfer authorization contact
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mDeploy transfer authorization contract\033[;39m"
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy --nonce $nonce -w $gas_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG`


>&2 echo -e "\033[;96mAdd transfer authorization record to contract registry\033[;39m"
advance_nonce
debug_rpc
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS`
add_pending_tx_hash $r
#advance_nonce
#debug_rpc
#>&2 echo -e "\033[;96mDeploy transfer authorization contract\033[;39m"
#DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy --nonce $nonce -w $gas_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG`
#
#
#>&2 echo -e "\033[;96mAdd transfer authorization record to contract registry\033[;39m"
#advance_nonce
#debug_rpc
#r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS`
#add_pending_tx_hash $r


# Deploy token index contract

@ -66,6 +66,13 @@ debug_rpc
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS`
add_pending_tx_hash $r

# Assign writer for token index
>&2 echo -e "\033[;96mEnable token index writer $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER to write to accounts index contract at $DEV_TOKEN_INDEX_ADDRESS\033[;39m"
advance_nonce
debug_rpc
r=`eth-accounts-index-writer -s -u -i $CHAIN_SPEC -p $RPC_PROVIDER --nonce $nonce --fee-limit 1000000 -e $DEV_TOKEN_INDEX_ADDRESS $DEV_DEBUG_FLAG $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER`
add_pending_tx_hash $r

check_wait 2

echo -e "\033[;96mWriting env_reset file\033[;39m"
@ -61,7 +61,7 @@ function deploy_token_erc20_demurrage_token() {
_deploy_token_defaults "DET" "Demurrage Token"
advance_nonce
debug_rpc
TOKEN_ADDRESS=`erc20-demurrage-token-deploy --nonce $nonce $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL $DEV_DEBUG_FLAG -ww -s`
TOKEN_ADDRESS=`erc20-demurrage-token-deploy --nonce $nonce $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals $TOKEN_DECIMALS $DEV_DEBUG_FLAG -ww -s`
}

function deploy_accounts_index() {
@ -28,15 +28,6 @@ RUN pip install --index-url $PIP_INDEX_URL \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt

COPY override_requirements.txt .

RUN pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
--force-reinstall \
--no-cache \
-r override_requirements.txt

RUN pip freeze

COPY . .

@ -1 +0,0 @@
chainlib-eth==0.0.10a15
@ -1,13 +1,12 @@
cic-eth[tools]==0.12.5a2
chainlib-eth>=0.0.10a17,<0.1.0
eth-erc20>=0.1.2a3,<0.2.0
erc20-demurrage-token>=0.0.5a2,<0.1.0
eth-address-index>=0.2.4a1,<0.3.0
cic-eth-registry>=0.6.1a6,<0.7.0
erc20-transfer-authorization>=0.3.5a2,<0.4.0
erc20-faucet>=0.3.2a2,<0.4.0
sarafu-faucet>=0.0.7a2,<0.1.0
confini>=0.4.2rc3,<1.0.0
eth-token-index>=0.2.4a1,<=0.3.0
okota>=0.2.4a15,<0.3.0
cic-types~=0.2.1a2
cic-eth[tools]==0.12.7
cic-types~=0.2.1a8
chainlib-eth~=0.0.15
eth-erc20~=0.1.5
erc20-demurrage-token~=0.0.7
eth-address-index~=0.2.4
cic-eth-registry~=0.6.5
erc20-faucet==0.3.2
sarafu-faucet==0.0.7
confini~=0.5.3
eth-token-index==0.2.4
okota==0.2.5a1
@ -18,6 +18,7 @@ from eth_address_declarator.declarator import AddressDeclarator
from funga.eth.signer import EIP155Signer
from funga.eth.keystore.dict import DictKeystore
from hexathon import add_0x, strip_0x
from okota.token_index.index import to_identifier

# local imports

@ -109,6 +110,7 @@ if __name__ == '__main__':
identifier = bytes.fromhex(hashed_token_proof)
token_immutable_proof_writer = MetadataRequestsHandler(cic_type=MetadataPointer.NONE, identifier=identifier)
write_metadata(token_immutable_proof_writer, token_proof_data)
logg.debug(f'Writing hashed proof: {hashed_token_proof}')
write_to_declarator(contract_address=args.address_declarator,
contract_wrapper=contract_wrapper,
proof=hashed_token_proof,

@ -116,12 +118,11 @@ if __name__ == '__main__':
signer_address=args.signer_address,
token_address=args.e)

hashed_token_proof = hash_proof(args.token_symbol.encode('utf-8'))
identifier = bytes.fromhex(hashed_token_proof)
token_immutable_proof_writer = MetadataRequestsHandler(cic_type=MetadataPointer.NONE, identifier=identifier)
hashed_token_proof = to_identifier(args.token_symbol)
logg.debug(f'Writing hashed proof: {hashed_token_proof}')
write_to_declarator(contract_address=args.address_declarator,
contract_wrapper=contract_wrapper,
proof=identifier,
proof=hashed_token_proof,
rpc=rpc,
signer_address=args.signer_address,
token_address=args.e)
@ -1,48 +1,37 @@
# standard imports
import argparse
import json
import logging
import os
import sys
import logging
import time
import argparse
import sys
import re
import hashlib
import csv
import json

# external imports
import confini
from hexathon import (
strip_0x,
add_0x,
)
from chainsyncer.backend.memory import MemBackend
from chainsyncer.driver.head import HeadSyncer
from chainlib.eth.connection import EthHTTPConnection
from chainlib.chain import ChainSpec
from chainlib.eth.address import to_checksum_address
from chainlib.eth.block import (
block_latest,
)
from chainlib.hash import keccak256_string_to_hex
from chainlib.eth.address import to_checksum_address
)
from chainlib.eth.connection import EthHTTPConnection
from chainlib.eth.error import (
RequestMismatchException,
)
from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import TxFactory
from chainlib.jsonrpc import JSONRPCRequest
from chainlib.eth.error import (
EthException,
RequestMismatchException,
)
from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore
from cic_types.models.person import Person
from eth_erc20 import ERC20
from chainlib.hash import keccak256_string_to_hex
from chainsyncer.backend.memory import MemBackend
from chainsyncer.driver.head import HeadSyncer
from cic_eth.cli.chain import chain_interface
from cic_types.models.person import Person
from eth_accounts_index import AccountsIndex
from eth_contract_registry import Registry
from eth_erc20 import ERC20
from eth_token_index import TokenUniqueSymbolIndex

from funga.eth.keystore.dict import DictKeystore
from funga.eth.signer import EIP155Signer
from hexathon import (
strip_0x,
)

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
@ -2,8 +2,8 @@
import logging

# external imports
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore
from funga.eth.signer import EIP155Signer
from funga.eth.keystore.dict import DictKeystore

logg = logging.getLogger(__name__)

@ -9,8 +9,8 @@ from chainlib.chain import ChainSpec
from chainlib.eth.address import to_checksum_address
from chainlib.eth.connection import EthHTTPConnection
from confini import Config
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore
from funga.eth.signer import EIP155Signer
from funga.eth.keystore.dict import DictKeystore

# local imports
from import_util import BalanceProcessor, get_celery_worker_status

@ -33,8 +33,8 @@ from chainlib.eth.error import (
RequestMismatchException,
)
from chainlib.chain import ChainSpec
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore
from funga.eth.signer import EIP155Signer
from funga.eth.keystore.dict import DictKeystore
from cic_types.models.person import Person
from eth_erc20 import ERC20
from cic_eth.cli.chain import chain_interface

@ -27,9 +27,9 @@ from cic_types.processor import generate_metadata_pointer
from cic_types import MetadataPointer
from eth_accounts_index.registry import AccountRegistry
from eth_contract_registry import Registry
from crypto_dev_signer.keystore.dict import DictKeystore
from crypto_dev_signer.eth.signer.defaultsigner import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.keyfile import to_dict as to_keyfile_dict
from funga.eth.keystore.dict import DictKeystore
from funga.eth.signer.defaultsigner import EIP155Signer
from funga.eth.keystore.keyfile import to_dict as to_keyfile_dict

# local imports
from common.dirs import initialize_dirs
@ -1,15 +1,15 @@
sarafu-faucet~=0.0.7a2
cic-eth[tools]~=0.12.4a13
cic-types~=0.2.1a2
funga>=0.5.1a1,<=0.5.15
cic-eth[tools]~=0.12.5a11
cic-types~=0.2.1a8
funga>=0.5.1
faker==4.17.1
chainsyncer~=0.0.7a3
chainlib-eth~=0.0.10a18
chainlib-eth~=0.0.15
eth-address-index~=0.2.4a1
eth-contract-registry~=0.6.3a3
eth-accounts-index~=0.1.2a3
eth-erc20~=0.1.2a3
eth-erc20==0.1.4
erc20-faucet~=0.3.2a2
psycopg2==2.8.6
liveness~=0.0.1a7
confini>=0.4.2rc3,<0.5.0
confini>=0.5.2
@ -68,7 +68,11 @@ admin_tests = [
'local_key',
]

all_tests = eth_tests + custodial_tests + metadata_tests + phone_tests
cache_tests = [
'cache_tx_user',
]

all_tests = eth_tests + custodial_tests + metadata_tests + phone_tests + cache_tests

argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')

@ -77,11 +81,14 @@ argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', defa
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
argparser.add_argument('--meta-provider', type=str, dest='meta_provider', default='http://localhost:63380', help='cic-meta url')
argparser.add_argument('--ussd-provider', type=str, dest='ussd_provider', default='http://localhost:63315', help='cic-ussd url')
argparser.add_argument('--cache-provider', type=str, dest='cache_provider', default='http://localhost:63313', help='cic-cache url')
argparser.add_argument('--skip-custodial', dest='skip_custodial', action='store_true', help='skip all custodial verifications')
argparser.add_argument('--skip-ussd', dest='skip_ussd', action='store_true', help='skip all ussd verifications')
argparser.add_argument('--skip-metadata', dest='skip_metadata', action='store_true', help='skip all metadata verifications')
argparser.add_argument('--skip-cache', dest='skip_cache', action='store_true', help='skip all cache verifications')
argparser.add_argument('--exclude', action='append', type=str, default=[], help='skip specified verification')
argparser.add_argument('--include', action='append', type=str, help='include specified verification')
argparser.add_argument('--list-verifications', action='store_true', help='print a list of verification check identifiers')
argparser.add_argument('--token-symbol', default='GFT', type=str, dest='token_symbol', help='Token symbol to use for trnsactions')
argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')

@ -115,6 +122,7 @@ config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
config.add(args.meta_provider, '_META_PROVIDER', True)
config.add(args.ussd_provider, '_USSD_PROVIDER', True)
config.add(args.cache_provider, '_CACHE_PROVIDER', True)

token_symbol = args.token_symbol
@ -351,6 +359,24 @@ class Verifier:
raise VerifierError(o_retrieved, 'metadata (person)')


def verify_cache_tx_user(self, address, balance=None):
url = os.path.join(config.get('_CACHE_PROVIDER'), 'txa', 'user', address)
req = urllib.request.Request(url)
req.add_header('X_CIC_CACHE_MODE', 'all')
try:
res = urllib.request.urlopen(req)
except urllib.error.HTTPError as e:
raise VerifierError(
'({}) {}'.format(url, e),
'cache (tx user)',
)
r = json.load(res)
if len(r['data']) == 0:
raise VerifierError('empty tx list for address {}'.format(address), 'cache (tx user)')
for tx in r['data']:
logg.warning('found tx {} for {} but not checking validity'.format(tx['tx_hash'], address))
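The same check can be run by hand against a locally running cic-cache; the snippet below mirrors the request the verifier makes (the port matches the --cache-provider default above, and the address is a placeholder):

import json
import os
import urllib.request

cache_provider = 'http://localhost:63313'
address = 'eb3907ecad74a0013c259d5874ae7f22dcbcc95c'  # placeholder, no 0x prefix

url = os.path.join(cache_provider, 'txa', 'user', address)
req = urllib.request.Request(url)
req.add_header('X_CIC_CACHE_MODE', 'all')
with urllib.request.urlopen(req) as res:
    r = json.load(res)
print('cached transactions for {}: {}'.format(address, len(r['data'])))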

def verify_metadata_phone(self, address, balance=None):
upper_address = strip_0x(address).upper()
f = open(os.path.join(

@ -397,11 +423,13 @@ class Verifier:
if m != 'CON Welcome':
raise VerifierError(response_data, 'ussd')


def verify_ussd_pins(self, address, balance):
response_data = send_ussd_request(address, self.data_dir)
if response_data[:11] != 'CON Balance' and response_data[:9] != 'CON Salio':
raise VerifierError(response_data, 'pins')


def verify(self, address, balance, debug_stem=None):

for k in active_tests:
apps/helpers/demurrage_token_poke/poke.sh (new file, 18 lines)
@ -0,0 +1,18 @@
#!/bin/bash

default_token_address=`eth-contract-registry-list --raw -e $CIC_REGISTRY_ADDRESS DefaultToken`
export TOKEN_ADDRESSES=${TOKEN_ADDRESSES:-$default_token_address}

IFS="," read -r -a token_addresses <<< $TOKEN_ADDRESSES
export RPC_VERIFY=1

for token_address in ${token_addresses[@]}; do
>&2 echo checking token address $token_address
t=`eth-encode --signature demurrageTimestamp -e $token_address --notx`
v=`eth-encode --signature demurrageAmount -e $token_address --notx`
>&2 echo last demurrage apply call for $token_address was value $v at $t
if [ "$?" -eq 0 ]; then
h=`eth-encode --signature applyDemurrage -i $CHAIN_SPEC -y $WALLET_KEY_FILE -e $token_address --fee-limit 8000000 -s -ww`
>&2 echo applied demurrage on $token_address tx hash $h
fi
done
@ -153,7 +153,7 @@ services:
SIGNER_PROVIDER: ${SIGNER_PROVIDER:-http://cic-signer:8000}
SIGNER_SECRET: ${SIGNER_SECRET:-deadbeef}
TASKS_TRACE_QUEUE_STATUS: ${TASKS_TRACE_QUEUE_STATUS:-1}
ETH_MIN_FEE_PRICE: $ETH_MIN_FEE_PRICE
ETH_MIN_FEE_PRICE: ${ETH_MIN_FEE_PRICE:-1000000000}
restart: unless-stopped
depends_on:
- evm

@ -203,7 +203,7 @@ services:
SYNCER_LOOP_INTERVAL: ${SYNCER_LOOP_INTERVAL:-5}
SYNCER_NO_HISTORY: ${SYNCER_NO_HISTORY:-1}
SYNCER_OFFSET: ${SYNCER_OFFSET:-0}
TASKS_TRANSFER_CALLBACKS: ${TASKS_TRANSFER_CALLBACKS:-"cic-eth:cic_eth.callbacks.noop.noop,cic-ussd:cic_ussd.tasks.callback_handler.transaction_callback"}
TASKS_TRANSFER_CALLBACKS: ${TASKS_TRANSFER_CALLBACKS:-cic-eth:cic_eth.callbacks.noop.noop,cic-ussd:cic_ussd.tasks.callback_handler.transaction_callback}
restart: unless-stopped
depends_on:
- evm

@ -342,7 +342,7 @@ services:
SYNCER_LOOP_INTERVAL: ${SYNCER_LOOP_INTERVAL:-5}
SYNCER_OFFSET: ${SYNCER_OFFSET:-0}
SYNCER_NO_HISTORY: ${SYNCER_NO_HISTORY:-1}
TASKS_TRANSFER_CALLBACKS: ${TASKS_TRANSFER_CALLBACKS:-"cic-eth:cic_eth.callbacks.noop.noop,cic-ussd:cic_ussd.tasks.callback_handler.transaction_callback"}
TASKS_TRANSFER_CALLBACKS: ${TASKS_TRANSFER_CALLBACKS:-cic-eth:cic_eth.callbacks.noop.noop,cic-ussd:cic_ussd.tasks.callback_handler.transaction_callback}
restart: unless-stopped
depends_on:
- evm

@ -497,7 +497,7 @@ services:
dockerfile: docker/Dockerfile
args:
PIP_INDEX_URL: ${PIP_INDEX_URL:-https://pypi.org/simple}
EXTRA_PIP_INDEX_URL: ${EXTRA_PIP_INDEX_URL:-https://pip.grassrootseconomics.net:8433}
EXTRA_PIP_INDEX_URL: ${EXTRA_PIP_INDEX_URL:-https://pip.grassrootseconomics.net}
EXTRA_PIP_ARGS: $EXTRA_PIP_ARGS
DOCKER_REGISTRY: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}
environment:
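Dropping the quotes inside the ${TASKS_TRANSFER_CALLBACKS:-...} defaults above matters because the fallback text is substituted verbatim, so embedded quote characters would end up inside the container's environment value. A small sketch of that fallback behaviour (resolve_default is illustrative, not compose's implementation):

def resolve_default(env, name, default):
    # ${NAME:-default}: use the variable if set and non-empty, otherwise the literal default text
    value = env.get(name)
    return value if value else default

quoted = resolve_default({}, 'TASKS_TRANSFER_CALLBACKS', '"cic-eth:cic_eth.callbacks.noop.noop"')
plain = resolve_default({}, 'TASKS_TRANSFER_CALLBACKS', 'cic-eth:cic_eth.callbacks.noop.noop')
assert quoted.startswith('"') and not plain.startswith('"')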