Compare commits

..

101 Commits

Author SHA1 Message Date
nolash
2ca865e946 Merge remote-tracking branch 'origin/master' into lash/bloxberg-seeding 2021-12-22 17:06:37 +00:00
nolash
6b3699471b Revert "Merge branch 'lash/verify-cache' into lash/bloxberg-seeding"
This reverts commit 99b0fb5aed, reversing
changes made to 58e766aa58.
2021-11-04 06:08:16 +01:00
nolash
99b0fb5aed Merge branch 'lash/verify-cache' into lash/bloxberg-seeding 2021-11-04 04:26:50 +01:00
nolash
29423449b7 Merge remote-tracking branch 'origin/master' into lash/verify-cache 2021-11-04 04:23:47 +01:00
nolash
58e766aa58
Remove explicit config in db migration 2021-11-04 04:18:27 +01:00
nolash
2ebcd3e3de Merge remote-tracking branch 'origin/master' into lash/bloxberg-seeding 2021-11-02 18:49:49 +01:00
nolash
c440b049cc
Add config dirs 2021-11-02 16:35:44 +01:00
nolash
09034af5bc
Bump cic-eth version 2021-11-02 16:03:29 +01:00
nolash
dc80bae673
Upgrade cic-eth in migrations 2021-11-02 15:31:00 +01:00
nolash
d88ae00b72
Add celery cli args with defaults from redis 2021-10-31 07:58:35 +01:00
nolash
7a366edb9d
WIP rehabilitate cic-eth-inspect 2021-10-30 19:09:17 +02:00
nolash
0b912b99b6
Add role listing to cic-eth tag cli tool 2021-10-30 13:19:31 +02:00
nolash
cbd4aef004
Add action confirm on sweep script 2021-10-30 10:25:39 +02:00
nolash
6f7f91780b
Add script to sweep gas from signer accounts 2021-10-30 09:02:04 +02:00
nolash
83ecdaf023
Connect token filter to tracker 2021-10-29 16:35:11 +02:00
nolash
e2ef9b43c8
Reactivate cic-eth-tasker dependency for bootstrap 2021-10-29 15:58:34 +02:00
nolash
6e58e4e4de
Remove nasty residue from bootstrap 2021-10-29 14:40:06 +02:00
nolash
f46c9b0e7d Merge remote-tracking branch 'origin/master' into lash/bloxberg-seeding 2021-10-29 11:39:40 +02:00
nolash
6ca3fd55d7
Add gas cache oracle connection for erc20 2021-10-29 08:45:42 +02:00
nolash
258ed420b8 Merge branch 'lash/tmp-bloxberg-seeding' into lash/bloxberg-seeding 2021-10-29 07:35:08 +02:00
nolash
1c022e9853
Added changes to wrong branch 2021-10-29 07:33:38 +02:00
nolash
d35e144723
Register gas cache only for registered tokens 2021-10-29 07:00:25 +02:00
nolash
fb953d0318
Add gas cache backend, test, filter 2021-10-28 21:45:47 +02:00
nolash
858bbdb69a Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-28 14:36:45 +02:00
nolash
66e23e4e20
Test config cleanup 2021-10-28 14:11:11 +02:00
nolash
546256c86a
Better gas gifting amounts and thresholds estimation, fix broken cic-eth imports 2021-10-28 13:34:39 +02:00
nolash
d9720bd0aa Merge remote-tracking branch 'origin/lash/local-dev-improve' into lash/bloxberg-seeding 2021-10-28 05:41:27 +02:00
nolash
e9e9f66d97
Correct wrong change for docker registries 2021-10-28 05:39:44 +02:00
nolash
0d640fab57 Merge remote-tracking branch 'origin/lash/local-dev-improve' into lash/bloxberg-seeding 2021-10-28 05:29:07 +02:00
nolash
4ce85bc824
Remove faulty default registry in dockerfiles 2021-10-28 05:27:13 +02:00
nolash
ce67f83457
Remove faulty default registry in docker compose 2021-10-28 05:24:11 +02:00
nolash
13f2e17931
Remove accidental 0 value override for syncer offset to trackers 2021-10-28 05:18:54 +02:00
nolash
f236234682 Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-27 16:58:38 +02:00
nolash
1f37632f0f
WIP Replace env vars in data-seeding with well-known 2021-10-27 16:56:03 +02:00
nolash
03d7518f8c Merge branch 'lash/local-dev-improve' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/local-dev-improve 2021-10-27 11:52:31 +02:00
nolash
67152d0df1
Replace KEYSTORE_PATH with WALLET_KEY_FILE in data seeding 2021-10-27 11:51:20 +02:00
9168322941
Revert base image changes. 2021-10-27 12:41:35 +03:00
2fbd338e24
Adds correct base image. 2021-10-27 11:44:23 +03:00
c7d7f2a64d
Remove force reset. 2021-10-27 11:44:08 +03:00
16153df2f0
Resolve creation of phone dir when it already exists. 2021-10-27 11:43:35 +03:00
nolash
4391fa3aff Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-25 21:01:27 +02:00
nolash
7ce68021bd Merge remote-tracking branch 'origin/master' into lash/verify-cache 2021-10-25 20:20:40 +02:00
nolash
cd602dee49
Remove WIP docker compose file 2021-10-25 20:12:32 +02:00
nolash
a548ba6fce
Chainlib upgrade to handle none receipts, rpc node debug output in bootstrap 2021-10-25 20:09:35 +02:00
nolash
a6de7e9fe0 Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-20 20:02:19 +02:00
nolash
e705a94873
Resolve notify/ussd dependency conflict 2021-10-20 10:07:19 +02:00
nolash
3923de0a81
Update pip args handling in notify 2021-10-19 23:01:55 +02:00
nolash
5c0250b5b9
Rehabilitate cic-cache db migration 2021-10-19 22:58:10 +02:00
nolash
3285d8dfe5
Implement asynchronous deploys in bootstrap 2021-10-19 22:08:17 +02:00
nolash
9d349f1579
Add debug level env var to bootstrap dev container 2021-10-19 19:54:59 +02:00
nolash
837a1770d1
Upgrade deps more chainlib in bootstrap 2021-10-19 10:10:39 +02:00
003febec9d
Bumps contract migration deps. 2021-10-19 10:38:21 +03:00
f066a32ce8
Adds libffi-dev for local git-tea. 2021-10-19 10:38:08 +03:00
nolash
ad493705ad
Upgrade deps 2021-10-18 17:16:28 +02:00
nolash
b765c4ab88
More wrestling with chainlib-eth deps 2021-10-18 17:06:31 +02:00
nolash
e4935d3b58 Merge branch 'lash/split-migration' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/split-migration 2021-10-18 16:49:58 +02:00
nolash
f88f0e321b
Upgrade chainlib-eth dep 2021-10-18 16:48:14 +02:00
31fa721397
Add cic-notify container 2021-10-18 17:17:53 +03:00
16481da193
Merge remote-tracking branch 'origin/lash/split-migration' into lash/split-migration 2021-10-18 16:54:23 +03:00
97a48cd8c6
Improves ussd deps. 2021-10-18 16:53:38 +03:00
nolash
7732412341 Merge branch 'lash/split-migration' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/split-migration 2021-10-18 15:51:38 +02:00
nolash
649b124a61
Ugprade chainqueue dep 2021-10-18 15:50:45 +02:00
7601e3eeff
Corrects breakages in cic-ussd 2021-10-18 15:19:32 +03:00
60a9efc88b
Merge remote-tracking branch 'origin/lash/split-migration' into lash/split-migration 2021-10-18 15:18:33 +03:00
45011b58c4
Cleans up configs. 2021-10-18 15:11:31 +03:00
nolash
f1a0b4ee7c Merge branch 'lash/split-migration' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/split-migration 2021-10-18 14:10:52 +02:00
nolash
c57abb7ad5
Upgrade deps in cic-eth, allow for new chain spec format 2021-10-18 14:08:39 +02:00
930a99c974
Bumps cic-types version. 2021-10-18 06:52:49 +03:00
b0935caab8
Fixes imports. 2021-10-18 06:52:28 +03:00
nolash
bdd5f6fcec
Update readme in data seeding 2021-10-17 19:37:29 +02:00
nolash
a293c2460e
Consolidate dir handling in data seeding scripts 2021-10-17 19:27:15 +02:00
nolash
0ee6400d7d
WIP rehabilitate ussd builds 2021-10-17 18:32:08 +02:00
nolash
677fb346fd
Add data seeding preparation step, rehabilitation of non-custodial seeding 2021-10-17 18:05:00 +02:00
nolash
ea3c75e755
Rehabilitate traffic script 2021-10-17 14:30:42 +02:00
nolash
0b2f22c416
Rehabilitate cic-user-server 2021-10-16 20:54:41 +02:00
nolash
24385ea27d
Rehabilitate cic-cache 2021-10-16 14:03:05 +02:00
nolash
9a154a8046
WIP rehabilitate cic-cache 2021-10-16 08:23:32 +02:00
nolash
d3576c8ec7
Add eth retrier to new docker compose file 2021-10-16 07:08:44 +02:00
nolash
79ee2bf4ff
Add eth tracker, dispatcher to new docker compose file 2021-10-16 07:04:19 +02:00
nolash
89ac70371a
Remove single function worker in test 2021-10-16 00:18:08 +02:00
nolash
5ea0318b0b
Fix default token symbol config setting for aux 2021-10-15 23:21:57 +02:00
nolash
5dfb96ec0c
Add new cic-signer app 2021-10-15 23:11:00 +02:00
nolash
4634ac41df Merge remote-tracking branch 'origin/master' into lash/split-migration 2021-10-15 22:19:01 +02:00
nolash
97f4fe8ca7
refactor docker-compose cic-eth-tasker, bootstrap (aka contract migration) 2021-10-15 22:16:45 +02:00
nolash
b36529f7fa
WIP local docker registry adaptations 2021-10-15 20:27:03 +02:00
nolash
a6675f2348
Add environment sourcing for cic-eth-tasker docker compose 2021-10-15 18:52:37 +02:00
nolash
e3116d74d6
No export 2021-10-15 12:54:16 +02:00
nolash
c0bbdc9bec
Add missing file 2021-10-15 08:43:04 +02:00
nolash
396bd4f300
update preliminary readme 2021-10-15 08:38:01 +02:00
nolash
58547b4067
Bump cic-eth-registry 2021-10-15 07:44:50 +02:00
nolash
9009815d78
Add trust address to contract migration config, get cic-eth default token from registry 2021-10-14 21:31:04 +02:00
nolash
2da19f5819
Add basic connectivity config directives 2021-10-14 17:40:53 +02:00
nolash
3948d5aa40
Add custodial initialization 2021-10-14 17:18:49 +02:00
nolash
ed432abb23
WIP refactor custodial initialization 2021-10-14 14:37:48 +02:00
nolash
f251b8b729
Remove dead code 2021-10-14 11:35:08 +02:00
nolash
36e791e08a
Split contract migration into three separate steps 2021-10-14 11:33:50 +02:00
nolash
71a7e3d3d5
Reinstate test config dir 2021-10-09 17:23:38 +02:00
nolash
335b7b30a4
Add okota dep 2021-10-09 16:40:28 +02:00
nolash
3b1f470ddf
Add empty config dir 2021-10-09 16:33:40 +02:00
nolash
4c9f20aa7f
Add explicit zero length tx lsit check for cic-cache verify 2021-10-08 11:26:09 +02:00
nolash
980191be4f
Add verify check for cache, use chainlib cli for cic-cache 2021-10-08 11:19:21 +02:00
178 changed files with 1948 additions and 5099 deletions

View File

@ -1,117 +0,0 @@
# CORE TEAM CONTRIBUTION GUIDE
# 1. Transparency
1.1 Use work logs for reflection of work done, as well as telling your peers about changes that may affect their own tasks
1.2 A work log SHOULD be submitted after a "unit of work" is complete.
1.2.1 A "unit of work" should not span more than one full day's worth of work.
1.2.2 A "unit of work" should be small enough that the log entries give useful insight.
1.3 Individual logs are reviewed in weekly meetings
<!--1.4 Bullet point list of topics and one or more sub-points describing each item in short sentences, eg;
```
- Core
* fixed foo
* fixed bar
- Frontend
* connected bar to baz
```-->
1.4 Work log format is defined in []()
1.5 Link to issue/MR in bullet point where appropriate
1.6
# 2. Code hygiene
2.1 Keep function names and variable names short
2.2 Keep code files, functions and test fixtures short
2.3 The less magic the better. Recombinable and replaceable is king
2.4 Group imports by `standard`, `external`, `local`, `test` - in that order
2.5 Only auto-import when necessary, and always with a minimum of side-effects
2.6 Use custom errors. Let them bubble up
2.7 No logs in tight loops
2.8 Keep executable main routine minimal. Pass variables (do not use globals) in main business logic function
2.9 Test coverage MUST be kept higher than 90% after changes
2.10 Docstrings. Always. Always!
# 3. Versioning
3.1 Use [Semantic Versioning](https://semver.org/)
3.2 When merging code, explicit dependencies SHOULD NOT use pre-release version
# 4. Issues
4.1 Issue title should use [Convention Commit structure](https://www.conventionalcommits.org/en/v1.0.0-beta.2/)
4.2 Issues need proper problem statement
4.2.1. What is the current state
4.2.2. If current state is not behaving as expected, what was the expected state
4.2.3. What is the desired new state.
4.3 Issues need proper resolution statement
4.3.1. Bullet point list of short sentences describing practical steps to reach desired state
4.3.2. Bullet point list of external resources informing the issue and resolution
4.4 Tasks need to be appropriately labelled using GROUP labels.
# 5. Code submission
5.1 A branch and new MR is always created BEFORE THE WORK STARTS
5.2 An MR should solve ONE SINGLE PART of a problem
5.3 Every MR should have at least ONE ISSUE associated with it. Ideally issue can be closed when MR is merged
5.4 MRs should not be open for more than one week (during normal operation periods)
5.5 MR should ideally not be longer than 400 lines of changes of logic
5.6 MRs that MOVE or DELETE code should not CHANGE that same code in a single MR. Scope MOVEs and DELETEs in separate commits (or even better, separate MRs) for transparency
# 6. Code reviews
6.1 At least one peer review before merge
6.2 If MR is too long, evaluate whether this affects the quality of the review negatively. If it does, expect to be asked to split it up
6.3 Evaluate changes against associated issues' problem statement and proposed resolution steps. If there is a mismatch, either MR needs to change or issue needs to be amended accordingly
6.4 Make sure all technical debt introduced by MR is documented in issues. Add them according to criteria in section ISSUES if not
6.5 If CI is not working, reviewer MUST make sure code builds and runs
6.6 Behave!
6.6.1 Don't be a jerk
6.6.2 Don't block needlessly
6.6.3 Say please

View File

@ -15,5 +15,5 @@ To get started see [./apps/contract-migration/README.md](./apps/contract-migrati
## Documentation ## Documentation
[https://docs.grassecon.org/software/](https://docs.grassecon.org/software/) [https://docs.grassecon.org/cic_stack/](https://docs.grassecon.org/cic_stack/)

View File

@ -1 +1 @@
include *requirements.txt cic_cache/data/config/* cic_cache/db/migrations/default/* cic_cache/db/migrations/default/versions/* include *requirements.txt cic_cache/data/config/*

View File

@ -1,4 +1,4 @@
[cic] [cic]
registry_address = registry_address =
trust_address = trust_address =
health_modules = health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas

View File

@ -3,8 +3,7 @@ engine =
driver = driver =
host = host =
port = port =
#name = cic-cache name = cic-cache
prefix =
user = user =
password = password =
debug = 0 debug = 0

View File

@ -9,26 +9,21 @@ from .list import (
tag_transaction, tag_transaction,
add_tag, add_tag,
) )
from cic_cache.db.models.base import SessionBase
logg = logging.getLogger() logg = logging.getLogger()
def dsn_from_config(config, name): def dsn_from_config(config):
scheme = config.get('DATABASE_ENGINE') scheme = config.get('DATABASE_ENGINE')
if config.get('DATABASE_DRIVER') != None: if config.get('DATABASE_DRIVER') != None:
scheme += '+{}'.format(config.get('DATABASE_DRIVER')) scheme += '+{}'.format(config.get('DATABASE_DRIVER'))
database_name = name
if config.get('DATABASE_PREFIX'):
database_name = '{}_{}'.format(config.get('DATABASE_PREFIX'), database_name)
dsn = '' dsn = ''
if config.get('DATABASE_ENGINE') == 'sqlite': if config.get('DATABASE_ENGINE') == 'sqlite':
SessionBase.poolable = False
dsn = '{}:///{}'.format( dsn = '{}:///{}'.format(
scheme, scheme,
database_name, config.get('DATABASE_NAME'),
) )
else: else:
@ -38,7 +33,7 @@ def dsn_from_config(config, name):
config.get('DATABASE_PASSWORD'), config.get('DATABASE_PASSWORD'),
config.get('DATABASE_HOST'), config.get('DATABASE_HOST'),
config.get('DATABASE_PORT'), config.get('DATABASE_PORT'),
database_name, config.get('DATABASE_NAME'),
) )
logg.debug('parsed dsn from config: {}'.format(dsn)) logg.debug('parsed dsn from config: {}'.format(dsn))
return dsn return dsn

View File

@ -5,11 +5,7 @@ import re
import base64 import base64
# external imports # external imports
from hexathon import ( from hexathon import add_0x
add_0x,
strip_0x,
)
from chainlib.encode import TxHexNormalizer
# local imports # local imports
from cic_cache.cache import ( from cic_cache.cache import (
@ -20,72 +16,27 @@ from cic_cache.cache import (
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
#logg = logging.getLogger() #logg = logging.getLogger()
re_transactions_all_bloom = r'/tx/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?' re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?' re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
re_transactions_all_data = r'/txa/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?' re_transactions_all_data = r'/txa/(\d+)?/?(\d+)/?'
re_transactions_account_data = r'/txa/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
re_default_limit = r'/defaultlimit/?'
DEFAULT_LIMIT = 100 DEFAULT_LIMIT = 100
tx_normalize = TxHexNormalizer()
def parse_query_account(r):
address = strip_0x(r[1])
#address = tx_normalize.wallet_address(address)
limit = DEFAULT_LIMIT
g = r.groups()
if len(g) > 3:
limit = int(r[4])
if limit == 0:
limit = DEFAULT_LIMIT
offset = 0
if len(g) > 4:
offset = int(r[6])
logg.debug('account query is address {} offset {} limit {}'.format(address, offset, limit))
return (address, offset, limit,)
# r is an re.Match
def parse_query_any(r):
limit = DEFAULT_LIMIT
offset = 0
block_offset = None
block_end = None
if r.lastindex != None:
if r.lastindex > 0:
limit = int(r[1])
if r.lastindex > 1:
offset = int(r[2])
if r.lastindex > 2:
block_offset = int(r[3])
if r.lastindex > 3:
block_end = int(r[4])
if block_end < block_offset:
raise ValueError('cart before the horse, dude')
logg.debug('data query is offset {} limit {} block_offset {} block_end {}'.format(offset, limit, block_offset, block_end))
return (offset, limit, block_offset, block_end,)
def process_default_limit(session, env):
r = re.match(re_default_limit, env.get('PATH_INFO'))
if not r:
return None
return ('application/json', str(DEFAULT_LIMIT).encode('utf-8'),)
def process_transactions_account_bloom(session, env): def process_transactions_account_bloom(session, env):
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO')) r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
if not r: if not r:
return None return None
logg.debug('match account bloom')
(address, offset, limit,) = parse_query_account(r) address = r[1]
if r[2] == None:
address = add_0x(address)
offset = 0
if r.lastindex > 2:
offset = r[4]
limit = DEFAULT_LIMIT
if r.lastindex > 4:
limit = r[6]
c = BloomCache(session) c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit) (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
@ -108,9 +59,13 @@ def process_transactions_all_bloom(session, env):
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO')) r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
if not r: if not r:
return None return None
logg.debug('match all bloom')
(limit, offset, block_offset, block_end,) = parse_query_any(r) offset = DEFAULT_LIMIT
if r.lastindex > 0:
offset = r[1]
limit = 0
if r.lastindex > 1:
limit = r[2]
c = BloomCache(session) c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit) (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
@ -133,16 +88,17 @@ def process_transactions_all_data(session, env):
r = re.match(re_transactions_all_data, env.get('PATH_INFO')) r = re.match(re_transactions_all_data, env.get('PATH_INFO'))
if not r: if not r:
return None return None
#if env.get('HTTP_X_CIC_CACHE_MODE') != 'all': if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
# return None return None
logg.debug('match all data')
logg.debug('got data request {}'.format(env)) logg.debug('got data request {}'.format(env))
block_offset = r[1]
(offset, limit, block_offset, block_end) = parse_query_any(r) block_end = r[2]
if int(r[2]) < int(r[1]):
raise ValueError('cart before the horse, dude')
c = DataCache(session) c = DataCache(session)
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, limit, block_offset, block_end, oldest=True) # oldest needs to be settable (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(0, 0, block_offset, block_end, oldest=True) # oldest needs to be settable
for r in tx_cache: for r in tx_cache:
r['date_block'] = r['date_block'].timestamp() r['date_block'] = r['date_block'].timestamp()
@ -157,30 +113,3 @@ def process_transactions_all_data(session, env):
j = json.dumps(o) j = json.dumps(o)
return ('application/json', j.encode('utf-8'),) return ('application/json', j.encode('utf-8'),)
def process_transactions_account_data(session, env):
r = re.match(re_transactions_account_data, env.get('PATH_INFO'))
if not r:
return None
logg.debug('match account data')
#if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
# return None
(address, offset, limit,) = parse_query_account(r)
c = DataCache(session)
(lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data(address, offset, limit)
for r in tx_cache:
r['date_block'] = r['date_block'].timestamp()
o = {
'low': lowest_block,
'high': highest_block,
'data': tx_cache,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)

View File

@ -12,20 +12,21 @@ import cic_cache.cli
from cic_cache.db import dsn_from_config from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase from cic_cache.db.models.base import SessionBase
from cic_cache.runnable.daemons.query import ( from cic_cache.runnable.daemons.query import (
process_default_limit,
process_transactions_account_bloom, process_transactions_account_bloom,
process_transactions_account_data,
process_transactions_all_bloom, process_transactions_all_bloom,
process_transactions_all_data, process_transactions_all_data,
) )
import cic_cache.cli
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')
arg_flags = cic_cache.cli.argflag_std_read # process args
local_arg_flags = cic_cache.cli.argflag_local_sync | cic_cache.cli.argflag_local_task arg_flags = cic_cache.cli.argflag_std_base
local_arg_flags = cic_cache.cli.argflag_local_task
argparser = cic_cache.cli.ArgumentParser(arg_flags) argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags) argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args() args = argparser.parse_args()
@ -34,7 +35,7 @@ args = argparser.parse_args()
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags) config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
# connect to database # connect to database
dsn = dsn_from_config(config, 'cic_cache') dsn = dsn_from_config(config)
SessionBase.connect(dsn, config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
@ -46,11 +47,9 @@ def application(env, start_response):
session = SessionBase.create_session() session = SessionBase.create_session()
for handler in [ for handler in [
process_transactions_account_data,
process_transactions_account_bloom,
process_transactions_all_data, process_transactions_all_data,
process_transactions_all_bloom, process_transactions_all_bloom,
process_default_limit, process_transactions_account_bloom,
]: ]:
r = None r = None
try: try:

View File

@ -3,7 +3,6 @@ import logging
import os import os
import sys import sys
import argparse import argparse
import tempfile
# third-party imports # third-party imports
import celery import celery
@ -29,7 +28,7 @@ args = argparser.parse_args()
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags) config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
# connect to database # connect to database
dsn = dsn_from_config(config, 'cic_cache') dsn = dsn_from_config(config)
SessionBase.connect(dsn) SessionBase.connect(dsn)
# set up celery # set up celery

View File

@ -50,7 +50,7 @@ args = argparser.parse_args()
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags) config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
# connect to database # connect to database
dsn = dsn_from_config(config, 'cic_cache') dsn = dsn_from_config(config)
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
# set up rpc # set up rpc

View File

@ -5,7 +5,7 @@ version = (
0, 0,
2, 2,
1, 1,
'alpha.3', 'alpha.2',
) )
version_object = semver.VersionInfo( version_object = semver.VersionInfo(

View File

@ -0,0 +1,3 @@
[celery]
broker_url = redis:///
result_url = redis:///

View File

@ -0,0 +1,3 @@
[cic]
registry_address =
trust_address =

View File

@ -0,0 +1,9 @@
[database]
NAME=cic_cache
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0

View File

@ -0,0 +1,3 @@
[celery]
broker_url = redis://localhost:63379
result_url = redis://localhost:63379

View File

@ -0,0 +1,3 @@
[cic]
registry_address =
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C

View File

@ -0,0 +1,9 @@
[database]
NAME=cic_cache
USER=grassroots
PASSWORD=
HOST=localhost
PORT=63432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0

View File

@ -0,0 +1,4 @@
[syncer]
loop_interval = 1
offset = 0
no_history = 0

View File

@ -0,0 +1,2 @@
[bancor]
dir =

View File

@ -1,3 +1,4 @@
[cic] [cic]
registry_address = registry_address =
chain_spec =
trust_address = trust_address =

View File

@ -1,5 +1,5 @@
[database] [database]
PREFIX=cic-cache-test NAME=cic-cache-test
USER=postgres USER=postgres
PASSWORD= PASSWORD=
HOST=localhost HOST=localhost

View File

@ -0,0 +1,5 @@
[eth]
#ws_provider = ws://localhost:8546
#ttp_provider = http://localhost:8545
provider = http://localhost:8545
#chain_id =

View File

@ -1,4 +1,4 @@
openapi: "3.0.2" openapi: "3.0.3"
info: info:
title: Grassroots Economics CIC Cache title: Grassroots Economics CIC Cache
description: Cache of processed transaction data from Ethereum blockchain and worker queues description: Cache of processed transaction data from Ethereum blockchain and worker queues
@ -9,34 +9,17 @@ info:
email: will@grassecon.org email: will@grassecon.org
license: license:
name: GPLv3 name: GPLv3
version: 0.2.0 version: 0.1.0
paths: paths:
/defaultlimit: /tx/{offset}/{limit}:
summary: The default limit value of result sets. description: Bloom filter for batch of latest transactions
get:
tags:
- transactions
description:
Retrieve default limit
operationId: limit.default
responses:
200:
description: Limit query successful
content:
application/json:
schema:
$ref: "#/components/schemas/Limit"
/tx:
summary: Bloom filter for batch of latest transactions
description: Generate a bloom filter of the latest transactions in the cache. The number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get: get:
tags: tags:
- transactions - transactions
description: description:
Retrieve transactions Retrieve transactions
operationId: tx.get.latest operationId: tx.get
responses: responses:
200: 200:
description: Transaction query successful. description: Transaction query successful.
@ -46,109 +29,27 @@ paths:
$ref: "#/components/schemas/BlocksBloom" $ref: "#/components/schemas/BlocksBloom"
/tx/{limit}:
summary: Bloom filter for batch of latest transactions
description: Generate a bloom filter of the latest transactions in the cache. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest.limit
responses:
200:
description: Transaction query successful. Results are ordered from newest to oldest.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters: parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
/tx/{limit}/{offset}:
summary: Bloom filter for batch of latest transactions
description: Generate a bloom filter of the latest transactions in the cache. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest.range
responses:
200:
description: Transaction query successful. Results are ordered from newest to oldest.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset - name: offset
in: path in: path
required: true
schema: schema:
type: integer type: integer
format: int32 format: int32
/tx/{limit}/{offset}/{block_offset}:
summary: Bloom filter for batch of transactions since a particular block.
description: Generate a bloom filter of the latest transactions since a particular block in the cache. The block parameter is inclusive. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest.range.block.offset
responses:
200:
description: Transaction query successful. Results are ordered from oldest to newest.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit - name: limit
in: path in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema: schema:
type: integer type: integer
format: int32 format: int32
/tx/{limit}/{offset}/{block_offset}/{block_end}: /tx/{address}/{offset}/{limit}:
summary: Bloom filter for batch of transactions within a particular block range. description: Bloom filter for batch of latest transactions by account
description: Generate a bloom filter of the latest transactions within a particular block range in the cache. The block parameters are inclusive. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get: get:
tags: tags:
- transactions - transactions
description: description:
Retrieve transactions Retrieve transactions
operationId: tx.get.latest.range.block.range operationId: tx.get
responses: responses:
200: 200:
description: Transaction query successful. description: Transaction query successful.
@ -157,49 +58,6 @@ paths:
schema: schema:
$ref: "#/components/schemas/BlocksBloom" $ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_end
in: path
required: true
schema:
type: integer
format: int32
/tx/{address}:
summary: Bloom filter for batch of latest transactions by account.
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.user
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters: parameters:
- name: address - name: address
@ -207,342 +65,26 @@ paths:
required: true required: true
schema: schema:
type: string type: string
/tx/{address}/{limit}:
summary: Bloom filter for batch of latest transactions by account.
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.user.limit
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: address
in: path
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
/tx/{address}/{limit}/{offset}:
summary: Bloom filter for batch of latest transactions by account
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.user.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: address
in: path
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset - name: offset
in: path in: path
required: true
schema: schema:
type: integer type: integer
format: int32 format: int32
/txa:
summary: Cached data for latest transactions.
description: Return data entries of the latest transactions in the cache. The number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
/txa/{limit}:
summary: Cached data for latest transactions.
description: Return data entries of the latest transactions in the cache. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.limit
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit - name: limit
in: path in: path
required: true
schema: schema:
type: integer type: integer
format: int32 format: int32
/txa/{limit}/{offset}:
summary: Cached data for latest transactions.
description: Return data entries of the latest transactions in the cache. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
/txa/{limit}/{offset}/{block_offset}:
summary: Cached data for transactions since a particular block.
description: Return cached data entries of transactions since a particular block. The block parameter is inclusive. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.range.block.offset
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
/txa/{limit}/{offset}/{block_offset}/{block_end}:
summary: Cached data for transactions within a particular block range.
description: Return cached data entries of transactions within a particular block range in the cache. The block parameters are inclusive. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.range.block.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_end
in: path
required: true
schema:
type: integer
format: int32
/txa/{address}:
summary: Cached data for batch of latest transactions by account.
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.user
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: address
in: path
required: true
schema:
type: string
/txa/{address}/{limit}:
summary: Cached data for batch of latest transactions by account.
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.user.limit
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: address
in: path
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
/txa/{address}/{limit}/{offset}:
summary: Cached data for batch of latest transactions by account.
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the number of maximum number of transactions returned is returned by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.user.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: address
in: path
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
components: components:
schemas: schemas:
Limit:
type: integer
format: int32
BlocksBloom: BlocksBloom:
type: object type: object
properties: properties:
low: low:
type: integer type: int
format: int32 format: int32
description: The lowest block number included in the filter description: The lowest block number included in the filter
high:
type: integer
format: int32
description: The highest block number included in the filter
block_filter: block_filter:
type: string type: string
format: byte format: byte
@ -555,89 +97,6 @@ components:
type: string type: string
description: Hashing algorithm (currently only using sha256) description: Hashing algorithm (currently only using sha256)
filter_rounds: filter_rounds:
type: integer type: int
format: int32 format: int32
description: Number of hash rounds used to create the filter description: Number of hash rounds used to create the filter
TransactionList:
type: object
properties:
low:
type: integer
format: int32
description: The lowest block number included in the result set
high:
type: integer
format: int32
description: The highest block number included in the filter
data:
type: array
description: Cached transaction data
items:
$ref: "#/components/schemas/Transaction"
Transaction:
type: object
properties:
block_number:
type: integer
format: int64
description: Block number transaction was included in.
tx_hash:
type: string
description: Transaction hash, in hex.
date_block:
type: integer
format: int32
description: Block timestamp.
sender:
type: string
description: Spender address, in hex.
recipient:
type: string
description: Beneficiary address, in hex.
from_value:
type: integer
format: int64
description: Value deducted from spender's balance.
to_value:
type: integer
format: int64
description: Value added to beneficiary's balance.
source_token:
type: string
description: Network address of token in which `from_value` is denominated.
destination_token:
type: string
description: Network address of token in which `to_value` is denominated.
success:
type: boolean
description: Network consensus state on whether the transaction was successful or not.
tx_type:
type: string
enum:
- erc20.faucet
- faucet.give_to
examples:
data_last:
summary: Get the latest cached transactions, using the server's default limit.
value: "/txa"
data_limit:
summary: Get the last 42 cached transactions.
value: "/txa/42"
data_range:
summary: Get the next 42 cached transactions, starting from the 13th (zero-indexed).
value: "/txa/42/13"
data_range_block_offset:
summary: Get the next 42 cached transactions, starting from block 1337 (inclusive).
value: "/txa/42/0/1337"
data_range_block_offset:
summary: Get the next 42 cached transactions within blocks 1337 and 1453 (inclusive).
value: "/txa/42/0/1337/1453"
data_range_block_range:
summary: Get the next 42 cached transactions after the 13th, within blocks 1337 and 1453 (inclusive).
value: "/txa/42/13/1337/1453"

View File

@ -4,9 +4,9 @@ FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
COPY requirements.txt . COPY requirements.txt .
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net"
ARG EXTRA_PIP_ARGS="" ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple ARG PIP_INDEX_URL="https://pypi.org/simple"
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \ pip install --index-url $PIP_INDEX_URL \
@ -14,9 +14,14 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \ --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt -r requirements.txt
COPY . . COPY . .
RUN pip install . --extra-index-url $EXTRA_PIP_INDEX_URL
RUN python setup.py install
# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-cache/
# for db migrations # for db migrations
COPY ./aux/wait-for-it/wait-for-it.sh ./ COPY ./aux/wait-for-it/wait-for-it.sh ./

View File

@ -2,5 +2,5 @@
set -e set -e
>&2 echo executing database migration >&2 echo executing database migration
python scripts/migrate_cic_cache.py --migrations-dir /usr/local/share/cic-cache/alembic -vv python scripts/migrate.py --migrations-dir /usr/local/share/cic-cache/alembic -vv
set +e set +e

View File

@ -1,15 +1,14 @@
alembic==1.4.2 alembic==1.4.2
confini~=0.5.3 confini>=0.3.6rc4,<0.5.0
uwsgi==2.0.19.1 uwsgi==2.0.19.1
moolb~=0.2.0 moolb~=0.1.1b2
cic-eth-registry~=0.6.6 cic-eth-registry~=0.6.1a1
SQLAlchemy==1.3.20 SQLAlchemy==1.3.20
semver==2.13.0 semver==2.13.0
psycopg2==2.8.6 psycopg2==2.8.6
celery==4.4.7 celery==4.4.7
redis==3.5.3 redis==3.5.3
chainsyncer[sql]~=0.0.7 chainsyncer[sql]>=0.0.6a3,<0.1.0
erc20-faucet~=0.3.2 erc20-faucet>=0.3.2a2, <0.4.0
chainlib-eth~=0.0.15 chainlib-eth>=0.0.9a14,<0.1.0
eth-address-index~=0.2.4 eth-address-index>=0.2.3a4,<0.3.0
okota~=0.2.5

View File

@ -1,55 +1,54 @@
#!/usr/bin/python3 #!/usr/bin/python
# standard imports
import os import os
import argparse import argparse
import logging import logging
import re import re
# external imports
import alembic import alembic
from alembic.config import Config as AlembicConfig from alembic.config import Config as AlembicConfig
import confini import confini
# local imports
from cic_cache.db import dsn_from_config from cic_cache.db import dsn_from_config
import cic_cache.cli
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
# BUG: the dbdir doesn't work after script install # BUG: the dbdir doesn't work after script install
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(cic_cache.__file__))) rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db') dbdir = os.path.join(rootdir, 'cic_cache', 'db')
default_migrations_dir = os.path.join(dbdir, 'migrations') migrationsdir = os.path.join(dbdir, 'migrations')
configdir = os.path.join(rootdir, 'cic_cache', 'data', 'config') configdir = os.path.join(rootdir, 'cic_cache', 'data', 'config')
#config_dir = os.path.join('/usr/local/etc/cic-cache') #config_dir = os.path.join('/usr/local/etc/cic-cache')
arg_flags = cic_cache.cli.argflag_std_base argparser = argparse.ArgumentParser()
local_arg_flags = cic_cache.cli.argflag_local_sync argparser.add_argument('-c', type=str, help='config file')
argparser = cic_cache.cli.ArgumentParser(arg_flags) argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.process_local_flags(local_arg_flags) argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading') argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
argparser.add_argument('-f', '--force', action='store_true', help='force action') argparser.add_argument('-f', action='store_true', help='force action')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=default_migrations_dir, type=str, help='migrations directory') argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args() args = argparser.parse_args()
extra_args = { if args.vv:
'reset': None, logging.getLogger().setLevel(logging.DEBUG)
'force': None, elif args.v:
'migrations_dir': None, logging.getLogger().setLevel(logging.INFO)
}
# process config
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)
migrations_dir = os.path.join(config.get('_MIGRATIONS_DIR'), config.get('DATABASE_ENGINE', 'default')) config = confini.Config(configdir, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))
migrations_dir = os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrations_dir): if not os.path.isdir(migrations_dir):
logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE'))) logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
migrations_dir = os.path.join(args.migrations_dir, 'default') migrations_dir = os.path.join(args.migrations_dir, 'default')
# connect to database # connect to database
dsn = dsn_from_config(config, 'cic_cache') dsn = dsn_from_config(config)
logg.info('using migrations dir {}'.format(migrations_dir)) logg.info('using migrations dir {}'.format(migrations_dir))

View File

@ -1,7 +1,6 @@
[metadata] [metadata]
name = cic-cache name = cic-cache
description = CIC Cache API and server description = CIC Cache API and server
version = 0.3.0a2
author = Louis Holbrook author = Louis Holbrook
author_email = dev@holbrook.no author_email = dev@holbrook.no
url = https://gitlab.com/grassrootseconomics/cic-eth url = https://gitlab.com/grassrootseconomics/cic-eth
@ -35,7 +34,7 @@ packages =
cic_cache.runnable.daemons cic_cache.runnable.daemons
cic_cache.runnable.daemons.filters cic_cache.runnable.daemons.filters
scripts = scripts =
./scripts/migrate_cic_cache.py ./scripts/migrate.py
[options.entry_points] [options.entry_points]
console_scripts = console_scripts =

View File

@ -1,39 +1,38 @@
from setuptools import setup from setuptools import setup
# import configparser import configparser
import os import os
import time
# import time from cic_cache.version import (
version_object,
version_string
)
# from cic_cache.version import ( class PleaseCommitFirstError(Exception):
# version_object, pass
# version_string
# ) def git_hash():
# import subprocess
# class PleaseCommitFirstError(Exception): git_diff = subprocess.run(['git', 'diff'], capture_output=True)
# pass if len(git_diff.stdout) > 0:
# raise PleaseCommitFirstError()
# def git_hash(): git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
# import subprocess git_hash_brief = git_hash.stdout.decode('utf-8')[:8]
# git_diff = subprocess.run(['git', 'diff'], capture_output=True) return git_hash_brief
# if len(git_diff.stdout) > 0:
# raise PleaseCommitFirstError() version_string = str(version_object)
# git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
# git_hash_brief = git_hash.stdout.decode('utf-8')[:8] try:
# return git_hash_brief version_git = git_hash()
# version_string += '+build.{}'.format(version_git)
# version_string = str(version_object) except FileNotFoundError:
# time_string_pair = str(time.time()).split('.')
# try: version_string += '+build.{}{:<09d}'.format(
# version_git = git_hash() time_string_pair[0],
# version_string += '+build.{}'.format(version_git) int(time_string_pair[1]),
# except FileNotFoundError: )
# time_string_pair = str(time.time()).split('.') print('final version string will be {}'.format(version_string))
# version_string += '+build.{}{:<09d}'.format(
# time_string_pair[0],
# int(time_string_pair[1]),
# )
# print('final version string will be {}'.format(version_string))
requirements = [] requirements = []
f = open('requirements.txt', 'r') f = open('requirements.txt', 'r')
@ -53,8 +52,9 @@ while True:
test_requirements.append(l.rstrip()) test_requirements.append(l.rstrip())
f.close() f.close()
setup( setup(
# version=version_string, version=version_string,
install_requires=requirements, install_requires=requirements,
tests_require=test_requirements, tests_require=test_requirements,
) )

View File

@ -7,4 +7,4 @@ pytest-celery==0.0.0a1
eth_tester==0.5.0b3 eth_tester==0.5.0b3
py-evm==0.3.0a20 py-evm==0.3.0a20
sarafu-faucet~=0.0.7a1 sarafu-faucet~=0.0.7a1
erc20-transfer-authorization~=0.3.6 erc20-transfer-authorization>=0.3.5a1,<0.4.0

View File

@ -6,7 +6,6 @@ import datetime
# external imports # external imports
import pytest import pytest
import moolb import moolb
from chainlib.encode import TxHexNormalizer
# local imports # local imports
from cic_cache import db from cic_cache import db
@ -43,8 +42,6 @@ def txs(
list_tokens, list_tokens,
): ):
tx_normalize = TxHexNormalizer()
session = init_database session = init_database
tx_number = 13 tx_number = 13
@ -57,10 +54,10 @@ def txs(
tx_hash_first, tx_hash_first,
list_defaults['block'], list_defaults['block'],
tx_number, tx_number,
tx_normalize.wallet_address(list_actors['alice']), list_actors['alice'],
tx_normalize.wallet_address(list_actors['bob']), list_actors['bob'],
tx_normalize.executable_address(list_tokens['foo']), list_tokens['foo'],
tx_normalize.executable_address(list_tokens['foo']), list_tokens['foo'],
1024, 1024,
2048, 2048,
True, True,
@ -77,10 +74,10 @@ def txs(
tx_hash_second, tx_hash_second,
list_defaults['block']-1, list_defaults['block']-1,
tx_number, tx_number,
tx_normalize.wallet_address(list_actors['diane']), list_actors['diane'],
tx_normalize.wallet_address(list_actors['alice']), list_actors['alice'],
tx_normalize.executable_address(list_tokens['foo']), list_tokens['foo'],
tx_normalize.wallet_address(list_tokens['foo']), list_tokens['foo'],
1024, 1024,
2048, 2048,
False, False,
@ -106,8 +103,6 @@ def more_txs(
session = init_database session = init_database
tx_normalize = TxHexNormalizer()
tx_number = 666 tx_number = 666
tx_hash = '0x' + os.urandom(32).hex() tx_hash = '0x' + os.urandom(32).hex()
tx_signed = '0x' + os.urandom(128).hex() tx_signed = '0x' + os.urandom(128).hex()
@ -120,10 +115,10 @@ def more_txs(
tx_hash, tx_hash,
list_defaults['block']+2, list_defaults['block']+2,
tx_number, tx_number,
tx_normalize.wallet_address(list_actors['alice']), list_actors['alice'],
tx_normalize.wallet_address(list_actors['diane']), list_actors['diane'],
tx_normalize.executable_address(list_tokens['bar']), list_tokens['bar'],
tx_normalize.executable_address(list_tokens['bar']), list_tokens['bar'],
2048, 2048,
4096, 4096,
False, False,

View File

@ -14,8 +14,7 @@ logg = logging.getLogger(__file__)
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def load_config(): def load_config():
config_dir = os.path.join(root_dir, 'config/test') config_dir = os.path.join(root_dir, 'config/test')
schema_config_dir = os.path.join(root_dir, 'cic_cache', 'data', 'config') conf = confini.Config(config_dir, 'CICTEST')
conf = confini.Config(schema_config_dir, 'CICTEST', override_dirs=config_dir)
conf.process() conf.process()
logg.debug('config {}'.format(conf)) logg.debug('config {}'.format(conf))
return conf return conf

View File

@ -24,15 +24,11 @@ def database_engine(
if load_config.get('DATABASE_ENGINE') == 'sqlite': if load_config.get('DATABASE_ENGINE') == 'sqlite':
SessionBase.transactional = False SessionBase.transactional = False
SessionBase.poolable = False SessionBase.poolable = False
name = 'cic_cache'
database_name = name
if load_config.get('DATABASE_PREFIX'):
database_name = '{}_{}'.format(load_config.get('DATABASE_PREFIX'), database_name)
try: try:
os.unlink(database_name) os.unlink(load_config.get('DATABASE_NAME'))
except FileNotFoundError: except FileNotFoundError:
pass pass
dsn = dsn_from_config(load_config, name) dsn = dsn_from_config(load_config)
SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
return dsn return dsn

View File

@ -14,7 +14,7 @@ def test_api_all_data(
): ):
env = { env = {
'PATH_INFO': '/txa/100/0/410000/420000', 'PATH_INFO': '/txa/410000/420000',
'HTTP_X_CIC_CACHE_MODE': 'all', 'HTTP_X_CIC_CACHE_MODE': 'all',
} }
j = process_transactions_all_data(init_database, env) j = process_transactions_all_data(init_database, env)
@ -23,7 +23,7 @@ def test_api_all_data(
assert len(o['data']) == 2 assert len(o['data']) == 2
env = { env = {
'PATH_INFO': '/txa/100/0/420000/410000', 'PATH_INFO': '/txa/420000/410000',
'HTTP_X_CIC_CACHE_MODE': 'all', 'HTTP_X_CIC_CACHE_MODE': 'all',
} }

View File

@ -6,7 +6,6 @@ import json
# external imports # external imports
import pytest import pytest
from chainlib.encode import TxHexNormalizer
# local imports # local imports
from cic_cache import db from cic_cache import db
@ -63,8 +62,6 @@ def test_cache_ranges(
session = init_database session = init_database
tx_normalize = TxHexNormalizer()
oldest = list_defaults['block'] - 1 oldest = list_defaults['block'] - 1
mid = list_defaults['block'] mid = list_defaults['block']
newest = list_defaults['block'] + 2 newest = list_defaults['block'] + 2
@ -103,39 +100,32 @@ def test_cache_ranges(
assert b[1] == mid assert b[1] == mid
# now check when supplying account # now check when supplying account
account = tx_normalize.wallet_address(list_actors['alice']) b = c.load_transactions_account(list_actors['alice'], 0, 100)
b = c.load_transactions_account(account, 0, 100)
assert b[0] == oldest assert b[0] == oldest
assert b[1] == newest assert b[1] == newest
account = tx_normalize.wallet_address(list_actors['bob']) b = c.load_transactions_account(list_actors['bob'], 0, 100)
b = c.load_transactions_account(account, 0, 100)
assert b[0] == mid assert b[0] == mid
assert b[1] == mid assert b[1] == mid
account = tx_normalize.wallet_address(list_actors['diane']) b = c.load_transactions_account(list_actors['diane'], 0, 100)
b = c.load_transactions_account(account, 0, 100)
assert b[0] == oldest assert b[0] == oldest
assert b[1] == newest assert b[1] == newest
# add block filter to the mix # add block filter to the mix
account = tx_normalize.wallet_address(list_actors['alice']) b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == newest assert b[1] == newest
account = tx_normalize.wallet_address(list_actors['alice']) b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == newest assert b[1] == newest
account = tx_normalize.wallet_address(list_actors['bob']) b = c.load_transactions_account(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == mid assert b[1] == mid
account = tx_normalize.wallet_address(list_actors['diane']) b = c.load_transactions_account(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == oldest assert b[0] == oldest
assert b[1] == oldest assert b[1] == oldest
@ -150,8 +140,6 @@ def test_cache_ranges_data(
session = init_database session = init_database
tx_normalize = TxHexNormalizer()
oldest = list_defaults['block'] - 1 oldest = list_defaults['block'] - 1
mid = list_defaults['block'] mid = list_defaults['block']
newest = list_defaults['block'] + 2 newest = list_defaults['block'] + 2
@ -215,8 +203,7 @@ def test_cache_ranges_data(
assert b[2][1]['tx_hash'] == more_txs[1] assert b[2][1]['tx_hash'] == more_txs[1]
# now check when supplying account # now check when supplying account
account = tx_normalize.wallet_address(list_actors['alice']) b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100)
b = c.load_transactions_account_with_data(account, 0, 100)
assert b[0] == oldest assert b[0] == oldest
assert b[1] == newest assert b[1] == newest
assert len(b[2]) == 3 assert len(b[2]) == 3
@ -224,15 +211,13 @@ def test_cache_ranges_data(
assert b[2][1]['tx_hash'] == more_txs[1] assert b[2][1]['tx_hash'] == more_txs[1]
assert b[2][2]['tx_hash'] == more_txs[2] assert b[2][2]['tx_hash'] == more_txs[2]
account = tx_normalize.wallet_address(list_actors['bob']) b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100)
b = c.load_transactions_account_with_data(account, 0, 100)
assert b[0] == mid assert b[0] == mid
assert b[1] == mid assert b[1] == mid
assert len(b[2]) == 1 assert len(b[2]) == 1
assert b[2][0]['tx_hash'] == more_txs[1] assert b[2][0]['tx_hash'] == more_txs[1]
account = tx_normalize.wallet_address(list_actors['diane']) b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100)
b = c.load_transactions_account_with_data(account, 0, 100)
assert b[0] == oldest assert b[0] == oldest
assert b[1] == newest assert b[1] == newest
assert len(b[2]) == 2 assert len(b[2]) == 2
@ -240,31 +225,27 @@ def test_cache_ranges_data(
assert b[2][1]['tx_hash'] == more_txs[2] assert b[2][1]['tx_hash'] == more_txs[2]
# add block filter to the mix # add block filter to the mix
account = tx_normalize.wallet_address(list_actors['alice']) b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == newest assert b[1] == newest
assert len(b[2]) == 2 assert len(b[2]) == 2
assert b[2][0]['tx_hash'] == more_txs[0] assert b[2][0]['tx_hash'] == more_txs[0]
assert b[2][1]['tx_hash'] == more_txs[1] assert b[2][1]['tx_hash'] == more_txs[1]
account = tx_normalize.wallet_address(list_actors['alice']) b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == newest assert b[1] == newest
assert len(b[2]) == 2 assert len(b[2]) == 2
assert b[2][0]['tx_hash'] == more_txs[0] assert b[2][0]['tx_hash'] == more_txs[0]
assert b[2][1]['tx_hash'] == more_txs[1] assert b[2][1]['tx_hash'] == more_txs[1]
account = tx_normalize.wallet_address(list_actors['bob']) b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == mid assert b[0] == mid
assert b[1] == mid assert b[1] == mid
assert len(b[2]) == 1 assert len(b[2]) == 1
assert b[2][0]['tx_hash'] == more_txs[1] assert b[2][0]['tx_hash'] == more_txs[1]
account = tx_normalize.wallet_address(list_actors['diane']) b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == oldest assert b[0] == oldest
assert b[1] == oldest assert b[1] == oldest
assert len(b[2]) == 1 assert len(b[2]) == 1

View File

@ -82,7 +82,7 @@ def test_query_regex(
[ [
('alice', None, None, [(420000, 13), (419999, 42)]), ('alice', None, None, [(420000, 13), (419999, 42)]),
('alice', None, 1, [(420000, 13)]), ('alice', None, 1, [(420000, 13)]),
('alice', 1, 1, [(419999, 42)]), # 420000 == list_defaults['block'] ('alice', 1, None, [(419999, 42)]), # 420000 == list_defaults['block']
('alice', 2, None, []), # 420000 == list_defaults['block'] ('alice', 2, None, []), # 420000 == list_defaults['block']
], ],
) )
@ -107,11 +107,10 @@ def test_query_process_txs_account(
path_info = '/tx/user/0x' + strip_0x(actor) path_info = '/tx/user/0x' + strip_0x(actor)
if query_offset != None: if query_offset != None:
path_info += '/' + str(query_offset) path_info += '/' + str(query_offset)
if query_limit == None: if query_limit != None:
query_limit = 100 if query_offset == None:
path_info += '/' + str(query_limit) path_info += '/0'
if query_offset == None: path_info += '/' + str(query_limit)
path_info += '/0'
env = { env = {
'PATH_INFO': path_info, 'PATH_INFO': path_info,
} }
@ -193,7 +192,7 @@ def test_query_process_txs_bloom(
@pytest.mark.parametrize( @pytest.mark.parametrize(
'query_block_start, query_block_end, query_match_count', 'query_block_start, query_block_end, query_match_count',
[ [
(1, 42, 0), (None, 42, 0),
(420000, 420001, 1), (420000, 420001, 1),
(419999, 419999, 1), # matches are inclusive (419999, 419999, 1), # matches are inclusive
(419999, 420000, 2), (419999, 420000, 2),
@ -212,7 +211,7 @@ def test_query_process_txs_data(
query_match_count, query_match_count,
): ):
path_info = '/txa/100/0' path_info = '/txa'
if query_block_start != None: if query_block_start != None:
path_info += '/' + str(query_block_start) path_info += '/' + str(query_block_start)
if query_block_end != None: if query_block_end != None:
@ -228,5 +227,4 @@ def test_query_process_txs_data(
assert r != None assert r != None
o = json.loads(r[1]) o = json.loads(r[1])
logg.debug('oo {}'.format(o))
assert len(o['data']) == query_match_count assert len(o['data']) == query_match_count

View File

@ -1,5 +1,5 @@
celery==4.4.7 celery==4.4.7
erc20-demurrage-token~=0.0.6 erc20-demurrage-token~=0.0.5a3
cic-eth-registry~=0.6.3 cic-eth-registry~=0.6.1a6
chainlib~=0.0.14 chainlib~=0.0.9rc1
cic_eth~=0.12.6 cic_eth~=0.12.4a11

View File

@ -1,6 +1,6 @@
[metadata] [metadata]
name = cic-eth-aux-erc20-demurrage-token name = cic-eth-aux-erc20-demurrage-token
version = 0.0.3 version = 0.0.2a7
description = cic-eth tasks supporting erc20 demurrage token description = cic-eth tasks supporting erc20 demurrage token
author = Louis Holbrook author = Louis Holbrook
author_email = dev@holbrook.no author_email = dev@holbrook.no

View File

@ -1,4 +1,5 @@
SQLAlchemy==1.3.20 SQLAlchemy==1.3.20
hexathon~=0.1.0 cic-eth-registry>=0.6.1a6,<0.7.0
chainqueue~=0.0.6a4 hexathon~=0.0.1a8
eth-erc20~=0.1.5 chainqueue>=0.0.4a6,<0.1.0
eth-erc20>=0.1.2a2,<0.2.0

View File

@ -515,7 +515,7 @@ class Api(ApiBase):
:param password: Password to encode the password with in the backend (careful, you will have to remember it) :param password: Password to encode the password with in the backend (careful, you will have to remember it)
:type password: str :type password: str
:param register: Register the new account in accounts index backend :param register: Register the new account in accounts index backend
:type register: bool :type password: bool
:returns: uuid of root task :returns: uuid of root task
:rtype: celery.Task :rtype: celery.Task
""" """

View File

@ -63,32 +63,22 @@ class Config(BaseConfig):
config.get('REDIS_HOST'), config.get('REDIS_HOST'),
config.get('REDIS_PORT'), config.get('REDIS_PORT'),
) )
db = getattr(args, 'redis_db', None)
if db != None:
db = str(db)
redis_url = ( redis_url = (
'redis', 'redis',
hostport, hostport,
db, getattr(args, 'redis_db', None),
) )
celery_config_url = urllib.parse.urlsplit(config.get('CELERY_BROKER_URL')) celery_config_url = urllib.parse.urlsplit(config.get('CELERY_BROKER_URL'))
hostport = urlhostmerge( hostport = urlhostmerge(
celery_config_url[1], celery_config_url[1],
getattr(args, 'celery_host', None), getattr(args, 'celery_host', None),
getattr(args, 'celery_port', None), getattr(args, 'celery_port', None),
) )
db = getattr(args, 'redis_db', None)
if db != None:
db = str(db)
celery_arg_url = ( celery_arg_url = (
getattr(args, 'celery_scheme', None), getattr(args, 'celery_scheme', None),
hostport, hostport,
db, getattr(args, 'celery_db', None),
) )
celery_url = urlmerge(redis_url, celery_config_url, celery_arg_url) celery_url = urlmerge(redis_url, celery_config_url, celery_arg_url)
celery_url_string = urllib.parse.urlunsplit(celery_url) celery_url_string = urllib.parse.urlunsplit(celery_url)
local_celery_args_override['CELERY_BROKER_URL'] = celery_url_string local_celery_args_override['CELERY_BROKER_URL'] = celery_url_string

View File

@ -22,7 +22,7 @@ from hexathon import (
from chainqueue.error import NotLocalTxError from chainqueue.error import NotLocalTxError
from eth_erc20 import ERC20 from eth_erc20 import ERC20
from chainqueue.sql.tx import cache_tx_dict from chainqueue.sql.tx import cache_tx_dict
from okota.token_index.index import to_identifier from okota.token_index import to_identifier
# local imports # local imports
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
@ -46,14 +46,13 @@ from cic_eth.task import (
from cic_eth.eth.nonce import CustodialTaskNonceOracle from cic_eth.eth.nonce import CustodialTaskNonceOracle
from cic_eth.encode import tx_normalize from cic_eth.encode import tx_normalize
from cic_eth.eth.trust import verify_proofs from cic_eth.eth.trust import verify_proofs
from cic_eth.error import SignerError
celery_app = celery.current_app celery_app = celery.current_app
logg = logging.getLogger() logg = logging.getLogger()
@celery_app.task(bind=True, base=CriticalWeb3Task) @celery_app.task(base=CriticalWeb3Task)
def balance(self, tokens, holder_address, chain_spec_dict): def balance(tokens, holder_address, chain_spec_dict):
"""Return token balances for a list of tokens for given address """Return token balances for a list of tokens for given address
:param tokens: Token addresses :param tokens: Token addresses
@ -72,9 +71,8 @@ def balance(self, tokens, holder_address, chain_spec_dict):
for t in tokens: for t in tokens:
address = t['address'] address = t['address']
logg.debug('address {} {}'.format(address, holder_address)) logg.debug('address {} {}'.format(address, holder_address))
gas_oracle = self.create_gas_oracle(rpc, min_price=self.min_fee_price)
token = ERC20Token(chain_spec, rpc, add_0x(address)) token = ERC20Token(chain_spec, rpc, add_0x(address))
c = ERC20(chain_spec, gas_oracle=gas_oracle) c = ERC20(chain_spec)
o = c.balance_of(address, holder_address, sender_address=caller_address) o = c.balance_of(address, holder_address, sender_address=caller_address)
r = rpc.do(o) r = rpc.do(o)
t['balance_network'] = c.parse_balance(r) t['balance_network'] = c.parse_balance(r)
@ -397,8 +395,6 @@ def cache_transfer_data(
sender_address = tx_normalize.wallet_address(tx['from']) sender_address = tx_normalize.wallet_address(tx['from'])
recipient_address = tx_normalize.wallet_address(tx_data[0]) recipient_address = tx_normalize.wallet_address(tx_data[0])
token_value = tx_data[1] token_value = tx_data[1]
source_token_address = tx_normalize.executable_address(tx['to'])
destination_token_address = source_token_address
session = SessionBase.create_session() session = SessionBase.create_session()
@ -406,8 +402,8 @@ def cache_transfer_data(
'hash': tx_hash_hex, 'hash': tx_hash_hex,
'from': sender_address, 'from': sender_address,
'to': recipient_address, 'to': recipient_address,
'source_token': source_token_address, 'source_token': tx['to'],
'destination_token': destination_token_address, 'destination_token': tx['to'],
'from_value': token_value, 'from_value': token_value,
'to_value': token_value, 'to_value': token_value,
} }
@ -439,16 +435,14 @@ def cache_transfer_from_data(
spender_address = tx_data[0] spender_address = tx_data[0]
recipient_address = tx_data[1] recipient_address = tx_data[1]
token_value = tx_data[2] token_value = tx_data[2]
source_token_address = tx_normalize.executable_address(tx['to'])
destination_token_address = source_token_address
session = SessionBase.create_session() session = SessionBase.create_session()
tx_dict = { tx_dict = {
'hash': tx_hash_hex, 'hash': tx_hash_hex,
'from': tx['from'], 'from': tx['from'],
'to': recipient_address, 'to': recipient_address,
'source_token': source_token_address, 'source_token': tx['to'],
'destination_token': destination_token_address, 'destination_token': tx['to'],
'from_value': token_value, 'from_value': token_value,
'to_value': token_value, 'to_value': token_value,
} }
@ -480,16 +474,14 @@ def cache_approve_data(
sender_address = tx_normalize.wallet_address(tx['from']) sender_address = tx_normalize.wallet_address(tx['from'])
recipient_address = tx_normalize.wallet_address(tx_data[0]) recipient_address = tx_normalize.wallet_address(tx_data[0])
token_value = tx_data[1] token_value = tx_data[1]
source_token_address = tx_normalize.executable_address(tx['to'])
destination_token_address = source_token_address
session = SessionBase.create_session() session = SessionBase.create_session()
tx_dict = { tx_dict = {
'hash': tx_hash_hex, 'hash': tx_hash_hex,
'from': sender_address, 'from': sender_address,
'to': recipient_address, 'to': recipient_address,
'source_token': source_token_address, 'source_token': tx['to'],
'destination_token': destination_token_address, 'destination_token': tx['to'],
'from_value': token_value, 'from_value': token_value,
'to_value': token_value, 'to_value': token_value,
} }

View File

@ -92,7 +92,7 @@ def apply_gas_value_cache_local(address, method, value, tx_hash, session=None):
if o == None: if o == None:
o = GasCache(address, method, value, tx_hash) o = GasCache(address, method, value, tx_hash)
elif value > o.value: elif tx.gas_used > o.value:
o.value = value o.value = value
o.tx_hash = strip_0x(tx_hash) o.tx_hash = strip_0x(tx_hash)

View File

@ -72,7 +72,7 @@ def __balance_incoming_compatible(token_address, receiver_address):
status_compare = dead() status_compare = dead()
q = q.filter(Otx.status.op('&')(status_compare)==0) q = q.filter(Otx.status.op('&')(status_compare)==0)
# TODO: this can change the result for the recipient if tx is later obsoleted and resubmission is delayed. # TODO: this can change the result for the recipient if tx is later obsoleted and resubmission is delayed.
#q = q.filter(Otx.status.op('&')(StatusBits.IN_NETWORK)==StatusBits.IN_NETWORK) q = q.filter(Otx.status.op('&')(StatusBits.IN_NETWORK)==StatusBits.IN_NETWORK)
q = q.filter(TxCache.destination_token_address==token_address) q = q.filter(TxCache.destination_token_address==token_address)
delta = 0 delta = 0
for r in q.all(): for r in q.all():

View File

@ -17,7 +17,7 @@ from cic_eth_registry.error import UnknownContractError
# local imports # local imports
from cic_eth.error import SeppukuError from cic_eth.error import SeppukuError
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
from cic_eth.eth.util import CacheGasOracle, MaxGasOracle from cic_eth.eth.util import CacheGasOracle
#logg = logging.getLogger().getChild(__name__) #logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger() logg = logging.getLogger()
@ -25,14 +25,12 @@ logg = logging.getLogger()
celery_app = celery.current_app celery_app = celery.current_app
class BaseTask(celery.Task): class BaseTask(celery.Task):
session_func = SessionBase.create_session session_func = SessionBase.create_session
call_address = ZERO_ADDRESS call_address = ZERO_ADDRESS
trusted_addresses = [] trusted_addresses = []
min_fee_price = 1 min_fee_price = 1
min_fee_limit = 30000
default_token_address = None default_token_address = None
default_token_symbol = None default_token_symbol = None
default_token_name = None default_token_name = None
@ -41,28 +39,21 @@ class BaseTask(celery.Task):
def create_gas_oracle(self, conn, address=None, *args, **kwargs): def create_gas_oracle(self, conn, address=None, *args, **kwargs):
x = None if address == None:
if address is None: return RPCGasOracle(
x = RPCGasOracle(
conn, conn,
code_callback=kwargs.get('code_callback', self.get_min_fee_limit), code_callback=kwargs.get('code_callback'),
min_price=self.min_fee_price, min_price=self.min_fee_price,
id_generator=kwargs.get('id_generator'), id_generator=kwargs.get('id_generator'),
) )
else:
x = MaxGasOracle(conn) return CacheGasOracle(
x.code_callback = x.get_fee_units conn,
address,
return x method=kwargs.get('method'),
min_price=self.min_fee_price,
id_generator=kwargs.get('id_generator'),
def get_min_fee_limit(self, code): )
return self.min_fee_limit
def get_min_fee_limit(self, code):
return self.min_fee_limit
def create_session(self): def create_session(self):

View File

@ -11,6 +11,13 @@ ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net
ARG EXTRA_PIP_ARGS="" ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple ARG PIP_INDEX_URL=https://pypi.org/simple
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a7
COPY *requirements.txt ./ COPY *requirements.txt ./
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \ pip install --index-url $PIP_INDEX_URL \
@ -33,6 +40,8 @@ RUN chmod 755 *.sh
# # they can all be overridden by environment variables # # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package) # # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/ #COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
# TODO this kind of code sharing across projects should be discouraged...can we make util a library? # TODO this kind of code sharing across projects should be discouraged...can we make util a library?
#COPY util/liveness/health.sh /usr/local/bin/health.sh #COPY util/liveness/health.sh /usr/local/bin/health.sh
@ -57,7 +66,8 @@ ENTRYPOINT []
## # they can all be overridden by environment variables ## # they can all be overridden by environment variables
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package) ## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/ #COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/ #COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
#COPY scripts/ scripts/ #COPY scripts/ scripts/
# #
## TODO this kind of code sharing across projects should be discouraged...can we make util a library? ## TODO this kind of code sharing across projects should be discouraged...can we make util a library?

View File

@ -1,7 +1,4 @@
celery==4.4.7 celery==4.4.7
chainlib-eth>=0.0.10a20,<0.1.0
semver==2.13.0 semver==2.13.0
chainlib-eth~=0.0.15 urlybird~=0.0.1a2
urlybird~=0.0.1
cic-eth-registry~=0.6.6
cic-types~=0.2.1a8
cic-eth-aux-erc20-demurrage-token~=0.0.3

View File

@ -1,15 +1,16 @@
chainqueue~=0.0.6a4 chainqueue>=0.0.6a1,<0.1.0
chainsyncer[sql]~=0.0.7 chainsyncer[sql]>=0.0.7a3,<0.1.0
alembic==1.4.2 alembic==1.4.2
confini~=0.5.3 confini>=0.3.6rc4,<0.5.0
redis==3.5.3 redis==3.5.3
hexathon~=0.1.0 hexathon~=0.0.1a8
pycryptodome==3.10.1 pycryptodome==3.10.1
liveness~=0.0.1a7 liveness~=0.0.1a7
eth-address-index~=0.2.4 eth-address-index>=0.2.4a1,<0.3.0
eth-accounts-index~=0.1.2 eth-accounts-index>=0.1.2a3,<0.2.0
erc20-faucet~=0.3.2 cic-eth-registry>=0.6.1a6,<0.7.0
erc20-transfer-authorization~=0.3.6 erc20-faucet>=0.3.2a2,<0.4.0
sarafu-faucet~=0.0.7 erc20-transfer-authorization>=0.3.5a2,<0.4.0
moolb~=0.2.0 sarafu-faucet>=0.0.7a2,<0.1.0
okota~=0.2.5 moolb~=0.1.1b2
okota>=0.2.4a6,<0.3.0

View File

@ -1,7 +1,7 @@
[metadata] [metadata]
name = cic-eth name = cic-eth
#version = attr: cic_eth.version.__version_string__ #version = attr: cic_eth.version.__version_string__
version = 0.12.7 version = 0.12.5a2
description = CIC Network Ethereum interaction description = CIC Network Ethereum interaction
author = Louis Holbrook author = Louis Holbrook
author_email = dev@holbrook.no author_email = dev@holbrook.no

View File

@ -6,5 +6,4 @@ pytest-redis==2.0.0
redis==3.5.3 redis==3.5.3
eth-tester==0.5.0b3 eth-tester==0.5.0b3
py-evm==0.3.0a20 py-evm==0.3.0a20
eth-erc20~=0.1.5 eth-erc20~=0.1.2a2
erc20-transfer-authorization~=0.3.6

View File

@ -40,7 +40,6 @@ def test_filter_gas(
foo_token, foo_token,
token_registry, token_registry,
register_lookups, register_lookups,
register_tokens,
celery_session_worker, celery_session_worker,
cic_registry, cic_registry,
): ):
@ -70,7 +69,7 @@ def test_filter_gas(
tx = Tx(tx_src, block=block) tx = Tx(tx_src, block=block)
tx.apply_receipt(rcpt) tx.apply_receipt(rcpt)
t = fltr.filter(eth_rpc, block, tx, db_session=init_database) t = fltr.filter(eth_rpc, block, tx, db_session=init_database)
assert t.get() == None assert t == None
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc) nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc)
c = TokenUniqueSymbolIndex(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) c = TokenUniqueSymbolIndex(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)

View File

@ -288,6 +288,7 @@ def test_fix_nonce(
init_database.commit() init_database.commit()
logg.debug('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
txs = get_nonce_tx_local(default_chain_spec, 3, agent_roles['ALICE'], session=init_database) txs = get_nonce_tx_local(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
ks = txs.keys() ks = txs.keys()
assert len(ks) == 2 assert len(ks) == 2

View File

@ -191,17 +191,11 @@ def test_tokens(
break break
api_param = str(uuid.uuid4()) api_param = str(uuid.uuid4())
fp = os.path.join(CallbackTask.mmap_path, api_param)
f = open(fp, 'wb+')
f.write(b'\x00')
f.close()
api = Api(str(default_chain_spec), queue=None, callback_param=api_param, callback_task='cic_eth.pytest.mock.callback.test_callback') api = Api(str(default_chain_spec), queue=None, callback_param=api_param, callback_task='cic_eth.pytest.mock.callback.test_callback')
t = api.tokens(['BAR'], proof=[[bar_token_declaration]]) t = api.tokens(['BAR'], proof=[[bar_token_declaration]])
r = t.get() r = t.get()
logg.debug('rr {} {}'.format(r, t.children)) logg.debug('rr {} {}'.format(r, t.children))
while True: while True:
fp = os.path.join(CallbackTask.mmap_path, api_param) fp = os.path.join(CallbackTask.mmap_path, api_param)
try: try:

View File

@ -35,26 +35,10 @@ from hexathon import strip_0x
from cic_eth.eth.gas import cache_gas_data from cic_eth.eth.gas import cache_gas_data
from cic_eth.error import OutOfGasError from cic_eth.error import OutOfGasError
from cic_eth.queue.tx import queue_create from cic_eth.queue.tx import queue_create
from cic_eth.task import BaseTask
logg = logging.getLogger() logg = logging.getLogger()
def test_task_gas_limit(
eth_rpc,
eth_signer,
default_chain_spec,
agent_roles,
celery_session_worker,
):
rpc = RPCConnection.connect(default_chain_spec, 'default')
gas_oracle = BaseTask().create_gas_oracle(rpc)
c = Gas(default_chain_spec, signer=eth_signer, gas_oracle=gas_oracle)
(tx_hash_hex, o) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 10, tx_format=TxFormat.RLP_SIGNED)
tx = unpack(bytes.fromhex(strip_0x(o)), default_chain_spec)
assert (tx['gas'], BaseTask.min_fee_price)
def test_task_check_gas_ok( def test_task_check_gas_ok(
default_chain_spec, default_chain_spec,
eth_rpc, eth_rpc,

View File

@ -143,7 +143,7 @@ def test_incoming_balance(
'converters': [], 'converters': [],
} }
b = balance_incoming([token_data], recipient, default_chain_spec.asdict()) b = balance_incoming([token_data], recipient, default_chain_spec.asdict())
assert b[0]['balance_incoming'] == 1000 assert b[0]['balance_incoming'] == 0
otx.readysend(session=init_database) otx.readysend(session=init_database)
init_database.flush() init_database.flush()
@ -152,8 +152,8 @@ def test_incoming_balance(
otx.sent(session=init_database) otx.sent(session=init_database)
init_database.commit() init_database.commit()
#b = balance_incoming([token_data], recipient, default_chain_spec.asdict()) b = balance_incoming([token_data], recipient, default_chain_spec.asdict())
#assert b[0]['balance_incoming'] == 1000 assert b[0]['balance_incoming'] == 1000
otx.success(block=1024, session=init_database) otx.success(block=1024, session=init_database)
init_database.commit() init_database.commit()

View File

@ -1,5 +1,7 @@
chainqueue~=0.0.6a4 crypto-dev-signer>=0.4.15rc2,<=0.4.15
chainqueue>=0.0.5a3,<0.1.0
cic-eth-registry>=0.6.1a6,<0.7.0
redis==3.5.3 redis==3.5.3
hexathon~=0.1.0 hexathon~=0.0.1a8
pycryptodome==3.10.1 pycryptodome==3.10.1
pyxdg==0.27 pyxdg==0.27

View File

@ -1,9 +1,10 @@
[database] [DATABASE]
name=cic_notify_test user = postgres
user= password =
password= host = localhost
host=localhost port = 5432
port= name = /tmp/cic-notify.db
engine=sqlite #engine = postgresql
driver=pysqlite #driver = psycopg2
debug=0 engine = sqlite
driver = pysqlite

View File

@ -1,7 +0,0 @@
[report]
omit =
venv/*
scripts/*
cic_notify/db/migrations/*
cic_notify/runnable/*
cic_notify/version.py

View File

@ -3,7 +3,6 @@ import logging
import re import re
# third-party imports # third-party imports
import cic_notify.tasks.sms.db
from celery.app.control import Inspect from celery.app.control import Inspect
import celery import celery
@ -14,16 +13,45 @@ app = celery.current_app
logging.basicConfig(level=logging.DEBUG) logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger() logg = logging.getLogger()
sms_tasks_matcher = r"^(cic_notify.tasks.sms)(\.\w+)?"
re_q = r'^cic-notify'
def get_sms_queue_tasks(app, task_prefix='cic_notify.tasks.sms.'):
host_queues = []
i = Inspect(app=app)
qs = i.active_queues()
for host in qs.keys():
for q in qs[host]:
if re.match(re_q, q['name']):
host_queues.append((host, q['name'],))
task_prefix_len = len(task_prefix)
queue_tasks = []
for (host, queue) in host_queues:
i = Inspect(app=app, destination=[host])
for tasks in i.registered_tasks().values():
for task in tasks:
if len(task) >= task_prefix_len and task[:task_prefix_len] == task_prefix:
queue_tasks.append((queue, task,))
return queue_tasks
class Api: class Api:
def __init__(self, queue: any = 'cic-notify'): # TODO: Implement callback strategy
def __init__(self, queue=None):
""" """
:param queue: The queue on which to execute notification tasks :param queue: The queue on which to execute notification tasks
:type queue: str :type queue: str
""" """
self.queue = queue self.queue = queue
self.sms_tasks = get_sms_queue_tasks(app)
logg.debug('sms tasks {}'.format(self.sms_tasks))
def sms(self, message: str, recipient: str):
def sms(self, message, recipient):
"""This function chains all sms tasks in order to send a message, log and persist said data to disk """This function chains all sms tasks in order to send a message, log and persist said data to disk
:param message: The message to be sent to the recipient. :param message: The message to be sent to the recipient.
:type message: str :type message: str
@ -32,9 +60,24 @@ class Api:
:return: a celery Task :return: a celery Task
:rtype: Celery.Task :rtype: Celery.Task
""" """
s_send = celery.signature('cic_notify.tasks.sms.africastalking.send', [message, recipient], queue=self.queue) signatures = []
s_log = celery.signature('cic_notify.tasks.sms.log.log', [message, recipient], queue=self.queue) for q in self.sms_tasks:
s_persist_notification = celery.signature(
'cic_notify.tasks.sms.db.persist_notification', [message, recipient], queue=self.queue) if not self.queue:
signatures = [s_send, s_log, s_persist_notification] queue = q[0]
return celery.group(signatures)() else:
queue = self.queue
signature = celery.signature(
q[1],
[
message,
recipient,
],
queue=queue,
)
signatures.append(signature)
t = celery.group(signatures)()
return t

View File

@ -2,7 +2,7 @@
[alembic] [alembic]
# path to migration scripts # path to migration scripts
script_location = . script_location = migrations
# template used to generate migration files # template used to generate migration files
# file_template = %%(rev)s_%%(slug)s # file_template = %%(rev)s_%%(slug)s
@ -27,17 +27,28 @@ script_location = .
# sourceless = false # sourceless = false
# version location specification; this defaults # version location specification; this defaults
# to ./versions. When using multiple version # to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path # directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat ./versions # version_locations = %(here)s/bar %(here)s/bat migrations/versions
# the output encoding used when revision files # the output encoding used when revision files
# are written from script.py.mako # are written from script.py.mako
# output_encoding = utf-8 # output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname sqlalchemy.url = postgres+psycopg2://postgres@localhost/cic-notify
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79
# Logging configuration # Logging configuration
[loggers] [loggers]
keys = root,sqlalchemy,alembic keys = root,sqlalchemy,alembic

View File

@ -11,7 +11,7 @@ config = context.config
# Interpret the config file for Python logging. # Interpret the config file for Python logging.
# This line sets up loggers basically. # This line sets up loggers basically.
fileConfig(config.config_file_name, disable_existing_loggers=True) fileConfig(config.config_file_name)
# add your model's MetaData object here # add your model's MetaData object here
# for 'autogenerate' support # for 'autogenerate' support
@ -56,14 +56,11 @@ def run_migrations_online():
and associate a connection with the context. and associate a connection with the context.
""" """
connectable = context.config.attributes.get("connection", None) connectable = engine_from_config(
config.get_section(config.config_ini_section),
if connectable is None: prefix="sqlalchemy.",
connectable = engine_from_config( poolclass=pool.NullPool,
context.config.get_section(context.config.config_ini_section), )
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection: with connectable.connect() as connection:
context.configure( context.configure(

View File

@ -7,7 +7,7 @@ import celery
celery_app = celery.current_app celery_app = celery.current_app
logg = celery_app.log.get_default_logger() logg = celery_app.log.get_default_logger()
local_logg = logging.getLogger() local_logg = logging.getLogger(__name__)
@celery_app.task @celery_app.task

View File

@ -9,7 +9,7 @@ import semver
logg = logging.getLogger() logg = logging.getLogger()
version = (0, 4, 0, 'alpha.12') version = (0, 4, 0, 'alpha.11')
version_object = semver.VersionInfo( version_object = semver.VersionInfo(
major=version[0], major=version[0],

View File

@ -1,4 +1,4 @@
confini~=0.5.1 confini>=0.3.6rc4,<0.5.0
africastalking==1.2.3 africastalking==1.2.3
SQLAlchemy==1.3.20 SQLAlchemy==1.3.20
alembic==1.4.2 alembic==1.4.2

View File

@ -1,9 +1,5 @@
Faker==11.1.0 pytest~=6.0.1
faker-e164==0.1.0 pytest-celery~=0.0.0a1
pytest==6.2.5 pytest-mock~=3.3.1
pytest-celery~=0.0.0 pysqlite3~=0.4.3
pytest-mock==3.6.1 pytest-cov==2.10.1
pysqlite3~=0.4.6
pytest-cov==3.0.0
pytest-alembic==0.7.0
requests-mock==1.9.3

View File

@ -1,28 +0,0 @@
import pytest
def test_single_head_revision(alembic_runner):
heads = alembic_runner.heads
head_count = len(heads)
assert head_count == 1
def test_upgrade(alembic_runner):
try:
alembic_runner.migrate_up_to("head")
except RuntimeError:
pytest.fail('Failed to upgrade to the head revision.')
def test_up_down_consistency(alembic_runner):
try:
for revision in alembic_runner.history.revisions:
alembic_runner.migrate_up_to(revision)
except RuntimeError:
pytest.fail('Failed to upgrade through each revision individually.')
try:
for revision in reversed(alembic_runner.history.revisions):
alembic_runner.migrate_down_to(revision)
except RuntimeError:
pytest.fail('Failed to downgrade through each revision individually.')

View File

@ -1,27 +0,0 @@
# standard imports
# external imports
from faker import Faker
from faker_e164.providers import E164Provider
# local imports
from cic_notify.db.enum import NotificationStatusEnum, NotificationTransportEnum
from cic_notify.db.models.notification import Notification
# test imports
from tests.helpers.phone import phone_number
def test_notification(init_database):
message = 'Hello world'
recipient = phone_number()
notification = Notification(NotificationTransportEnum.SMS, recipient, message)
init_database.add(notification)
init_database.commit()
notification = init_database.query(Notification).get(1)
assert notification.status == NotificationStatusEnum.UNKNOWN
assert notification.recipient == recipient
assert notification.message == message
assert notification.transport == NotificationTransportEnum.SMS

View File

@ -1,38 +0,0 @@
# standard imports
import os
# third-party imports
# local imports
from cic_notify.db import dsn_from_config
def test_dsn_from_config(load_config):
"""
"""
# test dsn for other db formats
overrides = {
'DATABASE_PASSWORD': 'password',
'DATABASE_DRIVER': 'psycopg2',
'DATABASE_ENGINE': 'postgresql'
}
load_config.dict_override(dct=overrides, dct_description='Override values to test different db formats.')
scheme = f'{load_config.get("DATABASE_ENGINE")}+{load_config.get("DATABASE_DRIVER")}'
dsn = dsn_from_config(load_config)
assert dsn == f"{scheme}://{load_config.get('DATABASE_USER')}:{load_config.get('DATABASE_PASSWORD')}@{load_config.get('DATABASE_HOST')}:{load_config.get('DATABASE_PORT')}/{load_config.get('DATABASE_NAME')}"
# undoes overrides to revert engine and drivers to sqlite
overrides = {
'DATABASE_PASSWORD': '',
'DATABASE_DRIVER': 'pysqlite',
'DATABASE_ENGINE': 'sqlite'
}
load_config.dict_override(dct=overrides, dct_description='Override values to test different db formats.')
# test dsn for sqlite engine
dsn = dsn_from_config(load_config)
scheme = f'{load_config.get("DATABASE_ENGINE")}+{load_config.get("DATABASE_DRIVER")}'
assert dsn == f'{scheme}:///{load_config.get("DATABASE_NAME")}'

View File

@ -1,75 +0,0 @@
# standard imports
import logging
import os
# external imports
import pytest
import requests_mock
# local imports
from cic_notify.error import NotInitializedError, AlreadyInitializedError, NotificationSendError
from cic_notify.tasks.sms.africastalking import AfricasTalkingNotifier
# test imports
from tests.helpers.phone import phone_number
def test_africas_talking_notifier(africastalking_response, caplog):
caplog.set_level(logging.DEBUG)
with pytest.raises(NotInitializedError) as error:
AfricasTalkingNotifier()
assert str(error.value) == ''
api_key = os.urandom(24).hex()
sender_id = 'bar'
username = 'sandbox'
AfricasTalkingNotifier.initialize(username, api_key, sender_id)
africastalking_notifier = AfricasTalkingNotifier()
assert africastalking_notifier.sender_id == sender_id
assert africastalking_notifier.initiated is True
with pytest.raises(AlreadyInitializedError) as error:
AfricasTalkingNotifier.initialize(username, api_key, sender_id)
assert str(error.value) == ''
with requests_mock.Mocker(real_http=False) as request_mocker:
message = 'Hello world.'
recipient = phone_number()
africastalking_response.get('SMSMessageData').get('Recipients')[0]['number'] = recipient
request_mocker.register_uri(method='POST',
headers={'content-type': 'application/json'},
json=africastalking_response,
url='https://api.sandbox.africastalking.com/version1/messaging',
status_code=200)
africastalking_notifier.send(message, recipient)
assert f'Africastalking response sender-id {africastalking_response}' in caplog.text
africastalking_notifier.sender_id = None
africastalking_notifier.send(message, recipient)
assert f'africastalking response no-sender-id {africastalking_response}' in caplog.text
with pytest.raises(NotificationSendError) as error:
status = 'InvalidPhoneNumber'
status_code = 403
africastalking_response.get('SMSMessageData').get('Recipients')[0]['status'] = status
africastalking_response.get('SMSMessageData').get('Recipients')[0]['statusCode'] = status_code
request_mocker.register_uri(method='POST',
headers={'content-type': 'application/json'},
json=africastalking_response,
url='https://api.sandbox.africastalking.com/version1/messaging',
status_code=200)
africastalking_notifier.send(message, recipient)
assert str(error.value) == f'Sending notification failed due to: {status}'
with pytest.raises(NotificationSendError) as error:
recipients = []
status = 'InsufficientBalance'
africastalking_response.get('SMSMessageData')['Recipients'] = recipients
africastalking_response.get('SMSMessageData')['Message'] = status
request_mocker.register_uri(method='POST',
headers={'content-type': 'application/json'},
json=africastalking_response,
url='https://api.sandbox.africastalking.com/version1/messaging',
status_code=200)
africastalking_notifier.send(message, recipient)
assert str(error.value) == f'Unexpected number of recipients: {len(recipients)}. Status: {status}'

View File

@ -1,26 +0,0 @@
# standard imports
# external imports
import celery
# local imports
from cic_notify.db.enum import NotificationStatusEnum, NotificationTransportEnum
from cic_notify.db.models.notification import Notification
# test imports
from tests.helpers.phone import phone_number
def test_persist_notification(celery_session_worker, init_database):
    """The persist task should store an SMS notification row for the recipient."""
    message = 'Hello world.'
    recipient = phone_number()
    persist_signature = celery.signature(
        'cic_notify.tasks.sms.db.persist_notification', (message, recipient)
    )
    persist_signature.apply_async().get()
    stored = Notification.session.query(Notification).filter_by(recipient=recipient).first()
    assert stored.recipient == recipient
    assert stored.message == message
    assert stored.status == NotificationStatusEnum.UNKNOWN
    assert stored.transport == NotificationTransportEnum.SMS

View File

@ -1,21 +0,0 @@
# standard imports
import logging
# external imports
import celery
# local imports
# test imports
from tests.helpers.phone import phone_number
def test_log(caplog, celery_session_worker):
    """The log task should emit the message and recipient at INFO level."""
    caplog.set_level(logging.INFO)
    message = 'Hello world.'
    recipient = phone_number()
    log_signature = celery.signature(
        'cic_notify.tasks.sms.log.log', [message, recipient]
    )
    log_signature.apply_async().get()
    assert f'message to {recipient}: {message}' in caplog.text

View File

@ -1,24 +0,0 @@
# standard imports
# external imports
import celery
# local imports
from cic_notify.api import Api
# test imports
from tests.helpers.phone import phone_number
def test_api(celery_session_worker, mocker):
    """Api.sms should group the send, log and persist task signatures."""
    patched_group = mocker.patch('celery.group')
    message = 'Hello world.'
    recipient = phone_number()
    # Order matters: the API is expected to group send, log, persist.
    task_names = [
        'cic_notify.tasks.sms.africastalking.send',
        'cic_notify.tasks.sms.log.log',
        'cic_notify.tasks.sms.db.persist_notification',
    ]
    expected_signatures = [
        celery.signature(task_name, [message, recipient], queue=None)
        for task_name in task_names
    ]
    api = Api(queue=None)
    api.sms(message, recipient)
    patched_group.assert_called_with(expected_signatures)

View File

@ -1,13 +1,31 @@
# standard imports # standard imports
import sys
import os
import pytest
import logging import logging
# third party imports # third party imports
import confini
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
sys.path.insert(0, root_dir)
# local imports # local imports
from cic_notify.db.models.base import SessionBase
#from transport.notification import AfricastalkingNotification
# test imports # fixtures
from tests.fixtures_config import *
from tests.fixtures_celery import *
from tests.fixtures_database import *
from .fixtures.celery import * logg = logging.getLogger()
from .fixtures.config import *
from .fixtures.database import *
from .fixtures.result import * #@pytest.fixture(scope='session')
#def africastalking_notification(
# load_config,
# ):
# return AfricastalkingNotificationTransport(load_config)
#

View File

@ -1,32 +0,0 @@
# standard imports
import os
import logging
# external imports
import pytest
from confini import Config
logg = logging.getLogger(__file__)
fixtures_dir = os.path.dirname(__file__)
root_directory = os.path.dirname(os.path.dirname(fixtures_dir))
@pytest.fixture(scope='session')
def alembic_config():
    """Alembic settings pointing at the default migrations directory."""
    migrations_directory = os.path.join(root_directory, 'cic_notify', 'db', 'migrations', 'default')
    return {
        'file': os.path.join(migrations_directory, 'alembic.ini'),
        'script_location': migrations_directory,
    }
@pytest.fixture(scope='session')
def load_config():
    """Load and process the test configuration from .config/test."""
    configuration = Config(default_dir=os.path.join(root_directory, '.config/test'))
    configuration.process()
    logg.debug('config loaded\n{}'.format(configuration))
    return configuration

View File

@ -1,54 +0,0 @@
# standard imports
import os
# third-party imports
import pytest
import alembic
from alembic.config import Config as AlembicConfig
# local imports
from cic_notify.db import dsn_from_config
from cic_notify.db.models.base import SessionBase, create_engine
from .config import root_directory
@pytest.fixture(scope='session')
def alembic_engine(load_config):
    """Database engine for pytest-alembic, built from the test configuration."""
    return create_engine(dsn_from_config(load_config))
@pytest.fixture(scope='session')
def database_engine(load_config):
    """Connect SessionBase to the test database and return its DSN."""
    if load_config.get('DATABASE_ENGINE') == 'sqlite':
        # A stale sqlite file would leak state between test sessions.
        try:
            os.unlink(load_config.get('DATABASE_NAME'))
        except FileNotFoundError:
            pass
    data_source_name = dsn_from_config(load_config)
    SessionBase.connect(data_source_name)
    return data_source_name
@pytest.fixture(scope='function')
def init_database(load_config, database_engine):
    """Migrate the test database to head, yield a session, then finalize it."""
    db_directory = os.path.join(root_directory, 'cic_notify', 'db')
    engine_name = load_config.get('DATABASE_ENGINE')
    migrations_directory = os.path.join(db_directory, 'migrations', engine_name)
    if not os.path.isdir(migrations_directory):
        # Fall back to the engine-agnostic migration scripts.
        migrations_directory = os.path.join(db_directory, 'migrations', 'default')
    migration_config = AlembicConfig(os.path.join(migrations_directory, 'alembic.ini'))
    migration_config.set_main_option('sqlalchemy.url', database_engine)
    migration_config.set_main_option('script_location', migrations_directory)
    alembic.command.downgrade(migration_config, 'base')
    alembic.command.upgrade(migration_config, 'head')
    session = SessionBase.create_session()
    yield session
    session.commit()
    session.close()

View File

@ -1,24 +0,0 @@
# standard imports
# external imports
import pytest
# local imports
# test imports
@pytest.fixture(scope="function")
def africastalking_response():
    """Sample successful Africa's Talking SMS API response payload."""
    recipient_entry = {
        "statusCode": 101,
        "number": "+254711XXXYYY",
        "status": "Success",
        "cost": "KES 0.8000",
        "messageId": "ATPid_SampleTxnId123"
    }
    return {
        "SMSMessageData": {
            "Message": "Sent to 1/1 Total Cost: KES 0.8000",
            "Recipients": [recipient_entry]
        }
    }

View File

@ -37,6 +37,12 @@ def celery_config():
shutil.rmtree(rq) shutil.rmtree(rq)
@pytest.fixture(scope='session')
def celery_worker_parameters():
    """Worker runs with default parameters; queue override kept for reference."""
    # 'queues': ('cic-notify'),
    return {}
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def celery_enable_logging(): def celery_enable_logging():
return True return True

View File

@ -0,0 +1,20 @@
# standard imports
import os
import logging
# third-party imports
import pytest
import confini
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
logg = logging.getLogger(__file__)
@pytest.fixture(scope='session')
def load_config():
    """Load CICTEST-prefixed test configuration from .config/test."""
    configuration_dir = os.path.join(root_dir, '.config/test')
    configuration = confini.Config(configuration_dir, 'CICTEST')
    configuration.process()
    logg.debug('config {}'.format(configuration))
    return configuration

View File

@ -0,0 +1,48 @@
# standard imports
import os
# third-party imports
import pytest
import alembic
from alembic.config import Config as AlembicConfig
# local imports
from cic_notify.db import SessionBase
from cic_notify.db import dsn_from_config
@pytest.fixture(scope='session')
def database_engine(load_config):
    """Connect the session base to the configured database and return the DSN."""
    data_source_name = dsn_from_config(load_config)
    SessionBase.connect(data_source_name)
    return data_source_name
@pytest.fixture(scope='function')
def init_database(load_config, database_engine):
    """Run migrations to head for one test, yield a session, then tear down."""
    repository_root = os.path.dirname(os.path.dirname(__file__))
    db_dir = os.path.join(repository_root, 'cic_notify', 'db')
    migrations_dir = os.path.join(db_dir, 'migrations', load_config.get('DATABASE_ENGINE'))
    if not os.path.isdir(migrations_dir):
        # No engine-specific scripts; use the default migration set.
        migrations_dir = os.path.join(db_dir, 'migrations', 'default')
    session = SessionBase.create_session()
    alembic_configuration = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
    alembic_configuration.set_main_option('sqlalchemy.url', database_engine)
    alembic_configuration.set_main_option('script_location', migrations_dir)
    alembic.command.downgrade(alembic_configuration, 'base')
    alembic.command.upgrade(alembic_configuration, 'head')
    yield session
    session.commit()
    session.close()

View File

@ -1,16 +0,0 @@
# standard imports
# external imports
from faker import Faker
from faker_e164.providers import E164Provider
# local imports
# test imports
# Shared faker instance with the E.164 phone-number provider registered.
fake = Faker()
fake.add_provider(E164Provider)


def phone_number() -> str:
    """Return a random Kenyan (KE) phone number in E.164 format."""
    generated: str = fake.e164('KE')
    return generated

View File

@ -0,0 +1,34 @@
# standard imports
import json
# third party imports
import pytest
import celery
# local imports
from cic_notify.tasks.sms import db
from cic_notify.tasks.sms import log
def test_log_notification(
        celery_session_worker,
):
    """The sms log task should complete for a recipient/content pair."""
    recipient = '+25412121212'
    content = 'bar'
    log_signature = celery.signature('cic_notify.tasks.sms.log.log')
    log_signature.apply_async(args=[recipient, content]).get()
def test_db_notification(
        init_database,
        celery_session_worker,
):
    """The persist task should complete against an initialized database."""
    recipient = '+25412121213'
    content = 'foo'
    persist_signature = celery.signature('cic_notify.tasks.sms.db.persist_notification')
    persist_signature.apply_async(args=[recipient, content]).get()

View File

@ -4,4 +4,3 @@ omit =
scripts/* scripts/*
cic_ussd/db/migrations/* cic_ussd/db/migrations/*
cic_ussd/runnable/* cic_ussd/runnable/*
cic_ussd/version.py

View File

@ -1,22 +0,0 @@
# standard imports
# external imports
# local imports
class Guardianship:
    """Registry of system-level PIN guardian phone numbers."""

    # Class-level list shared by all instances; populated once at start-up.
    guardians: list = []

    @classmethod
    def load_system_guardians(cls, guardians_file: str):
        """Load guardian phone numbers, one per line, from guardians_file."""
        with open(guardians_file, 'r') as system_guardians:
            entries = []
            for line in system_guardians:
                entries.append(line.strip())
            cls.guardians = entries

    def is_system_guardian(self, phone_number: str):
        """Check whether a phone number belongs to a system guardian.
        :param phone_number: Phone number to look up.
        :type phone_number: str
        :return: True when the number is in the loaded guardian list.
        :rtype: bool
        """
        return phone_number in self.guardians

View File

@ -13,6 +13,7 @@ from cic_types.condiments import MetadataPointer
from cic_ussd.account.chain import Chain from cic_ussd.account.chain import Chain
from cic_ussd.account.transaction import from_wei from cic_ussd.account.transaction import from_wei
from cic_ussd.cache import cache_data_key, get_cached_data from cic_ussd.cache import cache_data_key, get_cached_data
from cic_ussd.translation import translation_for
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
@ -96,3 +97,17 @@ def query_statement(blockchain_address: str, limit: int = 9):
callback_param=blockchain_address callback_param=blockchain_address
) )
cic_eth_api.list(address=blockchain_address, limit=limit) cic_eth_api.list(address=blockchain_address, limit=limit)
def statement_transaction_set(preferred_language: str, transaction_reprs: list):
    """Render a newline-terminated statement from transaction representations.

    Falls back to a translated "no transaction history" message when the
    list is empty.
    :param preferred_language: Language key used for the empty-history message.
    :type preferred_language: str
    :param transaction_reprs: Pre-formatted transaction strings.
    :type transaction_reprs: list
    :return: Concatenated statement text, each entry ending in a newline.
    :rtype: str
    """
    if not transaction_reprs:
        return translation_for('helpers.no_transaction_history', preferred_language)
    lines = [str(transaction_repr) for transaction_repr in transaction_reprs]
    return '\n'.join(lines) + '\n'

View File

@ -14,7 +14,8 @@ from cic_ussd.account.chain import Chain
from cic_ussd.cache import cache_data, cache_data_key, get_cached_data from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
from cic_ussd.error import CachedDataNotFoundError, SeppukuError from cic_ussd.error import CachedDataNotFoundError, SeppukuError
from cic_ussd.metadata.tokens import query_token_info, query_token_metadata from cic_ussd.metadata.tokens import query_token_info, query_token_metadata
from cic_ussd.processor.poller import wait_for_cache from cic_ussd.processor.util import wait_for_cache
from cic_ussd.translation import translation_for
logg = logging.getLogger(__file__) logg = logging.getLogger(__file__)
@ -325,3 +326,16 @@ def set_active_token(blockchain_address: str, token_symbol: str):
cache_data(key=key, data=token_symbol) cache_data(key=key, data=token_symbol)
def token_list_set(preferred_language: str, token_data_reprs: list):
    """Render a newline-terminated token list from token representations.

    Falls back to a translated "no tokens" message when the list is empty.
    :param preferred_language: Language key used for the empty-list message.
    :type preferred_language: str
    :param token_data_reprs: Pre-formatted token description strings.
    :type token_data_reprs: list
    :return: Concatenated token list text, each entry ending in a newline.
    :rtype: str
    """
    if not token_data_reprs:
        return translation_for('helpers.no_tokens_list', preferred_language)
    lines = [str(token_data_repr) for token_data_repr in token_data_reprs]
    return '\n'.join(lines) + '\n'

View File

@ -14,7 +14,7 @@ class Cache:
store: Redis = None store: Redis = None
def cache_data(key: str, data: [bytes, float, int, str]): def cache_data(key: str, data: str):
""" """
:param key: :param key:
:type key: :type key:
@ -55,6 +55,5 @@ def cache_data_key(identifier: Union[list, bytes], salt: MetadataPointer):
hash_object.update(identity) hash_object.update(identity)
else: else:
hash_object.update(identifier) hash_object.update(identifier)
if salt != MetadataPointer.NONE: hash_object.update(salt.value.encode(encoding="utf-8"))
hash_object.update(salt.value.encode(encoding="utf-8"))
return hash_object.digest().hex() return hash_object.digest().hex()

View File

@ -1,9 +0,0 @@
+254707628499
+254757628885
+254757628900
+254792048646
+254792048228
+254792048490
+254792048902
+254727806655
+254790079966

View File

@ -1,19 +0,0 @@
keys,en,sw,kam,kik,miji,luo,bor
female,Female,Mwanamke,Mundumuka,Mutumia,Muche,Dhako,Uwole
from,From,Kutoka kwa,Kuma kwa,Kuuma kwa,Ulaako,Kowuok kuom,ira
male,Male,Mwanaume,Mundume,Mundurume,Mulume,Dichuo,Dir
not_provided,Not provided,Haijawekwa,Inenganitwe,Ndiikiritwo,Kaphana,Okoketi,Kes inkan
no_language_list,No language list,Hamna lugha ya kuchagua,Vai luka ya kusakwa,Hatire ruthiomi rwakucagurwo,Kahana luga irio orodeshwa,Onge dhok miyiero,Afaan chaguad injirt
no_transaction_history,No transaction history,Hamna ripoti ya matumizi,Vai livoti ya utumii,Hatire riboti ya mahuthira,Kahana repoti ya mahumizi,Onge ripot mar tiyo,Odhuu jalkaban injirt
no_tokens_list,No more Sarafu,Hamna sarafu zingine,Vai Sarafu ingi,Hatire Sarafu inge,Kahana Sarafu zaidi,Onge Sarafu moko,Sarafu dibii injirt
other,Other,Nyingine,Ingi,Inge,Nyinjine,Moko,Ta dibii
received,Received,Ulipokea,Niwakwatie,Niuramukirire ,Hokera,Niyudo,Argat
sent,Sent,Ulituma,Niwatumie,Niuratumire,Humwa,Nioro,Ergan
to,To,Kwa,Kwa,Hare,Kwa,Ne,Es
guardians_list_header,Your PIN guards are:,PIN Walinzi uliowaongeza ni:,PIN Atetheesya ala wongelile ni:,Agiteri a PIN yaku ni:,PIN Aimirizi urioika ni:,PIN Jorit magi gin:,PIN Naam at korkorad:
no_guardians_list,No PIN guardians set,Hamna PIN walinzi walioongezwa,Vai atetheesya mongelwa,Hartire agiteri meekeretwo,Kahana aimirizi adzoikwa,Onge jorit moketi,Nam an korkorad injirt
error.no_phone_number_provided,No phone number was provided,Nambari ya simu haijawekwa,Namba ya simu inaikiwa,Namba ya thimu ndihianetwo,Kahana namba ya simu idzopewa,Namba mar simu okoketi,Namba simu kees inkaan
error.no_matching_account,The number provided is not registered,Nambari uliyoweka haijasajiliwa,Namba ya simu ila wekiya ti mbandikithye,Namba iria wekera ndiandekithetwo,Namba idzopewa kaidzagwe kusajiliwa,Namba mar simu miketo pok ondiki,Namba ka at kekeet sajiil incab
error.is_initiator,Phone number cannot be your own,Nambari yafaa kuwa tofauti na yako,Namba ya simu yaile ithiwa itavwanene na yaku,Namba ifatie gukorwo ina utiganu na yaku,Namba yasimu kaidima kukala niyako,Namba onego obed mopogre gimari,Namba simu tete tau mal
error.is_existent_guardian,This phone number is already added as a PIN guardian,Nambari hii tayari imeongezwa kama mlinzi wa nambari ya siri,Namba ii niyongeletwe tayari ta mutethesya wa kusovya pin,Namba ino niyongereirwo ta murugamereri ya namba ya thiri,Nambari ii yasimu yaikwa kare Muimirizi,Nambani oseketi kaka jarit,Namba tana yayu nam korkoradi taat
error.is_not_existent_guardian,Phone number not set as PIN guardian,Nambari hii haijaongezwa kama mlinzi wa nambari ya PIN,Namba ii iyongeletwe ta mutethesya wa kusovya PIN,Namba ino ndiongereirwo ta mugiteri wa PIN,Nambari ii yasimu kaiikika kugaluza PIN zda mwimirizi,Nambani pok omed kaka jarit,Namba simu ta nam korkorad indharan
1 keys en sw kam kik miji luo bor
2 female Female Mwanamke Mundumuka Mutumia Muche Dhako Uwole
3 from From Kutoka kwa Kuma kwa Kuuma kwa Ulaako Kowuok kuom ira
4 male Male Mwanaume Mundume Mundurume Mulume Dichuo Dir
5 not_provided Not provided Haijawekwa Inenganitwe Ndiikiritwo Kaphana Okoketi Kes inkan
6 no_language_list No language list Hamna lugha ya kuchagua Vai luka ya kusakwa Hatire ruthiomi rwakucagurwo Kahana luga irio orodeshwa Onge dhok miyiero Afaan chaguad injirt
7 no_transaction_history No transaction history Hamna ripoti ya matumizi Vai livoti ya utumii Hatire riboti ya mahuthira Kahana repoti ya mahumizi Onge ripot mar tiyo Odhuu jalkaban injirt
8 no_tokens_list No more Sarafu Hamna sarafu zingine Vai Sarafu ingi Hatire Sarafu inge Kahana Sarafu zaidi Onge Sarafu moko Sarafu dibii injirt
9 other Other Nyingine Ingi Inge Nyinjine Moko Ta dibii
10 received Received Ulipokea Niwakwatie Niuramukirire Hokera Niyudo Argat
11 sent Sent Ulituma Niwatumie Niuratumire Humwa Nioro Ergan
12 to To Kwa Kwa Hare Kwa Ne Es
13 guardians_list_header Your PIN guards are: PIN Walinzi uliowaongeza ni: PIN Atetheesya ala wongelile ni: Agiteri a PIN yaku ni: PIN Aimirizi urioika ni: PIN Jorit magi gin: PIN Naam at korkorad:
14 no_guardians_list No PIN guardians set Hamna PIN walinzi walioongezwa Vai atetheesya mongelwa Hartire agiteri meekeretwo Kahana aimirizi adzoikwa Onge jorit moketi Nam an korkorad injirt
15 error.no_phone_number_provided No phone number was provided Nambari ya simu haijawekwa Namba ya simu inaikiwa Namba ya thimu ndihianetwo Kahana namba ya simu idzopewa Namba mar simu okoketi Namba simu kees inkaan
16 error.no_matching_account The number provided is not registered Nambari uliyoweka haijasajiliwa Namba ya simu ila wekiya ti mbandikithye Namba iria wekera ndiandekithetwo Namba idzopewa kaidzagwe kusajiliwa Namba mar simu miketo pok ondiki Namba ka at kekeet sajiil incab
17 error.is_initiator Phone number cannot be your own Nambari yafaa kuwa tofauti na yako Namba ya simu yaile ithiwa itavwanene na yaku Namba ifatie gukorwo ina utiganu na yaku Namba yasimu kaidima kukala niyako Namba onego obed mopogre gimari Namba simu tete tau mal
18 error.is_existent_guardian This phone number is already added as a PIN guardian Nambari hii tayari imeongezwa kama mlinzi wa nambari ya siri Namba ii niyongeletwe tayari ta mutethesya wa kusovya pin Namba ino niyongereirwo ta murugamereri ya namba ya thiri Nambari ii yasimu yaikwa kare Muimirizi Nambani oseketi kaka jarit Namba tana yayu nam korkoradi taat
19 error.is_not_existent_guardian Phone number not set as PIN guardian Nambari hii haijaongezwa kama mlinzi wa nambari ya PIN Namba ii iyongeletwe ta mutethesya wa kusovya PIN Namba ino ndiongereirwo ta mugiteri wa PIN Nambari ii yasimu kaiikika kugaluza PIN zda mwimirizi Nambani pok omed kaka jarit Namba simu ta nam korkorad indharan

View File

@ -1,9 +0,0 @@
{
"en": "English",
"sw": "Kiswahili",
"kam": "Kamba",
"kik": "Kikiuyu",
"miji": "Mijikenda",
"luo": "Luo",
"bor": "Borana"
}

View File

@ -1,7 +0,0 @@
keys,en,sw,kam,kik,miji,luo,bor
account_successfully_created,You have been registered on Sarafu Network! To use dial *384*96# on Safaricom and *483*96# on other networks. For help %{support_phone},Umesajiliwa kwa Sarafu Network! Kutumia bonyeza *384*96# Safaricom ama *483*46# kwa utandao tofauti. Kwa Usaidizi %{support_phone},Niwayandikithya na Sarafu Network! Safaricom kuna namba ii *384*96# mitandao ingi *483*96#. Utethyo ungi kuna %{support_phone},Niweyandekithia kwe Sarafu Network! Kuhuthira hihinya *384*96# he Safaricom na *483*46# he mitambo ingi Uteithio %{support_phone},Usajiliwa Sarafu Network! kuhumira hopya *384*96# Saf *483*96# mtandao mnjine. Kuvizwa %{support_phone},Osendiki e Sarafu Network! Kidwatiyogo to dii *384*96# Safaricom kata *483*46# e netwak mamoko. Kuom kony %{support_phone},Yaayu sirejestan Sarafu Network! Kuches *384*96# Safaricom *483*46# Airtel
received_tokens,Successfully received %{amount} %{token_symbol} from %{tx_sender_information} %{timestamp} to %{tx_recipient_information} Balance %{balance} %{token_symbol},Umepokea %{amount} %{token_symbol} kutoka kwa %{tx_sender_information} %{timestamp} kuendea %{tx_recipient_information} Salio %{balance} %{token_symbol},Niwakwata %{amount} %{token_symbol} kuma %{tx_sender_information} %{timestamp} kuvikia %{tx_recipient_information} Mbalansi %{balance} %{token_symbol},Wamukira %{amount} %{token_symbol} kuuma kwa %{tx_sender_information} %{timestamp} to %{tx_recipient_information} Watigaria %{balance} %{token_symbol},Uphokera %{amount} %{token_symbol} kula %{tx_sender_information} %{timestamp} Kwenda %{tx_recipient_information}. Sazoro %{balance} %{token_symbol},Iyudo %{amount} %{token_symbol} kowuok kuom %{tx_sender_information} %{timestamp} odhi ne %{tx_recipient_information}. Dong mari en %{balance} %{token_symbol},Yaargat %{amount} %{token_symbol} ira %{tx_sender_information} %{timestamp} Es %{tx_recipient_information} Balansi %{balance} %{token_symbol}
sent_tokens,Successfully sent %{amount} %{token_symbol} to %{tx_recipient_information} %{timestamp} from %{tx_sender_information} Balance %{balance} %{token_symbol},Umetuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp} kutoka kwa %{tx_sender_information} Salio %{balance} %{token_symbol},Niwatuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp} kuma %{tx_sender_information} Mbalansi %{balance} %{token_symbol}.,Watuma %{amount} %{token_symbol} kwe %{tx_recipient_information} %{timestamp} kuuma %{tx_sender_information} Watigaria %{balance} %{token_symbol},Uhuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp} kula %{tx_sender_information} Sazoro %{balance} %{token_symbol},Ioro %{amount} %{token_symbol} ne %{tx_recipient_information} %{timestamp} kowuok kuom %{tx_sender_information}. Dong mari en %{balance} %{token_symbol},yaergat %{amount} %{token_symbol} Es %{tx_recipient_information} %{timestamp} ira %{tx_sender_information} Balansi hareetin %{balance} %{token_symbol}
terms,By using the service you agree to the terms and conditions at http://grassecon.org/tos,Kwa kutumia hii huduma umekubali sheria na masharti yafuatayo http://grassecon.org/tos,Kwa kutumia mutandao uu niwetikilana na miyao na masharti ma http://grassecon.org/tos,"Kuhuthira mitambo ino , niuraetekania na mawatho na mutaratara wa http://grassecon.org/tos","Kuhumira huduma,Ukubali sheria na malagizo http://grassecon.org/tos ",Kuom tiyo gi huduma ni iyie chike kod weche mantie http://grassecon.org/tos,Oja service tun tumiith yaayuu kubalt one chuf at http://grassecon.org/tos
upsell_unregistered_recipient,%{tx_sender_information} tried to send you %{token_symbol}. Dial *384*96# on Safaricom and *483*96# on others For help %{support_phone},%{tx_sender_information} amejaribu kutuma %{token_symbol} na hujasajili. Bonyeza*384*96# Saf au*483*46# kwa mitandao tofauti. Usaidizi %{support_phone},%{tx_sender_information} niwatata kuutumia %{token_symbol} lakini ndwimwandikithye. Safaricom kuna *384*96# laini ingi *483*96# Utethyo %{support_phone},%{tx_sender_information} ekugeretie gugutumira %{token_symbol} no ndeyandikithetie. Hihinya *384*96# he Safaricom na *483*96# mitambo ingi. Uteithio %{support_phone},%{tx_sender_information} Yuhuma %{token_symbol} Kudzasajiliwa. Humira hopya *384*96# Safaricom au *483*96# mtandao mnjine. Kuvizwa %{support_phone},%{tx_sender_information} otemo oro ni %{token_symbol} to pok ondiki. Tiyo go dii *384*96# Safaricom gi *483*96# e netwak mamoko. E kony %{support_phone},%{tx_sender_information} yaa si ergu jariib %{token_symbol} ammo atin insajilan.Tumiitu kuches *384*96# Safaricom *483*96# dibii Qarqars %{support_phone}
pin_reset_initiated,%{pin_initiator} has sent a request to initiate your PIN reset,%{pin_initiator} ametuma ombi la kubadilisha PIN yako,%{pin_initiator} niwatuma wendi waku wa kwambiisya kusovya PIN yaku,%{pin_initiator} Niatuma ihoya ria guchengia PIN yaku,%{pin_initiator} yuhuma voyo kurekebisha piniyo.,%{pin_initiator} ooro kwayo mar loko nambani mopondo,%{pin_initiator} pin Tate badilishadu feet
1 keys en sw kam kik miji luo bor
2 account_successfully_created You have been registered on Sarafu Network! To use dial *384*96# on Safaricom and *483*96# on other networks. For help %{support_phone} Umesajiliwa kwa Sarafu Network! Kutumia bonyeza *384*96# Safaricom ama *483*46# kwa utandao tofauti. Kwa Usaidizi %{support_phone} Niwayandikithya na Sarafu Network! Safaricom kuna namba ii *384*96# mitandao ingi *483*96#. Utethyo ungi kuna %{support_phone} Niweyandekithia kwe Sarafu Network! Kuhuthira hihinya *384*96# he Safaricom na *483*46# he mitambo ingi Uteithio %{support_phone} Usajiliwa Sarafu Network! kuhumira hopya *384*96# Saf *483*96# mtandao mnjine. Kuvizwa %{support_phone} Osendiki e Sarafu Network! Kidwatiyogo to dii *384*96# Safaricom kata *483*46# e netwak mamoko. Kuom kony %{support_phone} Yaayu sirejestan Sarafu Network! Kuches *384*96# Safaricom *483*46# Airtel
3 received_tokens Successfully received %{amount} %{token_symbol} from %{tx_sender_information} %{timestamp} to %{tx_recipient_information} Balance %{balance} %{token_symbol} Umepokea %{amount} %{token_symbol} kutoka kwa %{tx_sender_information} %{timestamp} kuendea %{tx_recipient_information} Salio %{balance} %{token_symbol} Niwakwata %{amount} %{token_symbol} kuma %{tx_sender_information} %{timestamp} kuvikia %{tx_recipient_information} Mbalansi %{balance} %{token_symbol} Wamukira %{amount} %{token_symbol} kuuma kwa %{tx_sender_information} %{timestamp} to %{tx_recipient_information} Watigaria %{balance} %{token_symbol} Uphokera %{amount} %{token_symbol} kula %{tx_sender_information} %{timestamp} Kwenda %{tx_recipient_information}. Sazoro %{balance} %{token_symbol} Iyudo %{amount} %{token_symbol} kowuok kuom %{tx_sender_information} %{timestamp} odhi ne %{tx_recipient_information}. Dong mari en %{balance} %{token_symbol} Yaargat %{amount} %{token_symbol} ira %{tx_sender_information} %{timestamp} Es %{tx_recipient_information} Balansi %{balance} %{token_symbol}
4 sent_tokens Successfully sent %{amount} %{token_symbol} to %{tx_recipient_information} %{timestamp} from %{tx_sender_information} Balance %{balance} %{token_symbol} Umetuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp} kutoka kwa %{tx_sender_information} Salio %{balance} %{token_symbol} Niwatuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp} kuma %{tx_sender_information} Mbalansi %{balance} %{token_symbol}. Watuma %{amount} %{token_symbol} kwe %{tx_recipient_information} %{timestamp} kuuma %{tx_sender_information} Watigaria %{balance} %{token_symbol} Uhuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp} kula %{tx_sender_information} Sazoro %{balance} %{token_symbol} Ioro %{amount} %{token_symbol} ne %{tx_recipient_information} %{timestamp} kowuok kuom %{tx_sender_information}. Dong mari en %{balance} %{token_symbol} yaergat %{amount} %{token_symbol} Es %{tx_recipient_information} %{timestamp} ira %{tx_sender_information} Balansi hareetin %{balance} %{token_symbol}
5 terms By using the service you agree to the terms and conditions at http://grassecon.org/tos Kwa kutumia hii huduma umekubali sheria na masharti yafuatayo http://grassecon.org/tos Kwa kutumia mutandao uu niwetikilana na miyao na masharti ma http://grassecon.org/tos Kuhuthira mitambo ino , niuraetekania na mawatho na mutaratara wa http://grassecon.org/tos Kuhumira huduma,Ukubali sheria na malagizo http://grassecon.org/tos Kuom tiyo gi huduma ni iyie chike kod weche mantie http://grassecon.org/tos Oja service tun tumiith yaayuu kubalt one chuf at http://grassecon.org/tos
6 upsell_unregistered_recipient %{tx_sender_information} tried to send you %{token_symbol}. Dial *384*96# on Safaricom and *483*96# on others For help %{support_phone} %{tx_sender_information} amejaribu kutuma %{token_symbol} na hujasajili. Bonyeza*384*96# Saf au*483*46# kwa mitandao tofauti. Usaidizi %{support_phone} %{tx_sender_information} niwatata kuutumia %{token_symbol} lakini ndwimwandikithye. Safaricom kuna *384*96# laini ingi *483*96# Utethyo %{support_phone} %{tx_sender_information} ekugeretie gugutumira %{token_symbol} no ndeyandikithetie. Hihinya *384*96# he Safaricom na *483*96# mitambo ingi. Uteithio %{support_phone} %{tx_sender_information} Yuhuma %{token_symbol} Kudzasajiliwa. Humira hopya *384*96# Safaricom au *483*96# mtandao mnjine. Kuvizwa %{support_phone} %{tx_sender_information} otemo oro ni %{token_symbol} to pok ondiki. Tiyo go dii *384*96# Safaricom gi *483*96# e netwak mamoko. E kony %{support_phone} %{tx_sender_information} yaa si ergu jariib %{token_symbol} ammo atin insajilan.Tumiitu kuches *384*96# Safaricom *483*96# dibii Qarqars %{support_phone}
7 pin_reset_initiated %{pin_initiator} has sent a request to initiate your PIN reset %{pin_initiator} ametuma ombi la kubadilisha PIN yako %{pin_initiator} niwatuma wendi waku wa kwambiisya kusovya PIN yaku %{pin_initiator} Niatuma ihoya ria guchengia PIN yaku %{pin_initiator} yuhuma voyo kurekebisha piniyo. %{pin_initiator} ooro kwayo mar loko nambani mopondo %{pin_initiator} pin Tate badilishadu feet

File diff suppressed because it is too large Load Diff

View File

@ -63,7 +63,10 @@ class Account(SessionBase):
def remove_guardian(self, phone_number: str): def remove_guardian(self, phone_number: str):
set_guardians = self.guardians.split(',') set_guardians = self.guardians.split(',')
set_guardians.remove(phone_number) set_guardians.remove(phone_number)
self.guardians = ','.join(set_guardians) if len(set_guardians) > 1:
self.guardians = ','.join(set_guardians)
else:
self.guardians = set_guardians[0]
def get_guardians(self) -> list: def get_guardians(self) -> list:
return self.guardians.split(',') if self.guardians else [] return self.guardians.split(',') if self.guardians else []
@ -168,7 +171,7 @@ class Account(SessionBase):
return check_password_hash(password, self.password_hash) return check_password_hash(password, self.password_hash)
def create(chain_str: str, phone_number: str, session: Session, preferred_language: str): def create(chain_str: str, phone_number: str, session: Session):
""" """
:param chain_str: :param chain_str:
:type chain_str: :type chain_str:
@ -176,14 +179,12 @@ def create(chain_str: str, phone_number: str, session: Session, preferred_langua
:type phone_number: :type phone_number:
:param session: :param session:
:type session: :type session:
:param preferred_language:
:type preferred_language:
:return: :return:
:rtype: :rtype:
""" """
api = Api(callback_task='cic_ussd.tasks.callback_handler.account_creation_callback', api = Api(callback_task='cic_ussd.tasks.callback_handler.account_creation_callback',
callback_queue='cic-ussd', callback_queue='cic-ussd',
callback_param=preferred_language, callback_param='',
chain_str=chain_str) chain_str=chain_str)
task_uuid = api.create_account().id task_uuid = api.create_account().id
TaskTracker.add(session=session, task_uuid=task_uuid) TaskTracker.add(session=session, task_uuid=task_uuid)

View File

@ -2,441 +2,417 @@
"ussd_menu": { "ussd_menu": {
"1": { "1": {
"description": "Entry point for users to select their preferred language.", "description": "Entry point for users to select their preferred language.",
"display_key": "ussd.initial_language_selection", "display_key": "ussd.kenya.initial_language_selection",
"name": "initial_language_selection", "name": "initial_language_selection",
"parent": null "parent": null
}, },
"2": { "2": {
"description": "Entry point for users to enter a pin to secure their account.", "description": "Entry point for users to enter a pin to secure their account.",
"display_key": "ussd.initial_pin_entry", "display_key": "ussd.kenya.initial_pin_entry",
"name": "initial_pin_entry", "name": "initial_pin_entry",
"parent": null "parent": null
}, },
"3": { "3": {
"description": "Pin confirmation entry menu.", "description": "Pin confirmation entry menu.",
"display_key": "ussd.initial_pin_confirmation", "display_key": "ussd.kenya.initial_pin_confirmation",
"name": "initial_pin_confirmation", "name": "initial_pin_confirmation",
"parent": "initial_pin_entry" "parent": "initial_pin_entry"
}, },
"4": { "4": {
"description": "The signup process has been initiated and the account is being created.", "description": "The signup process has been initiated and the account is being created.",
"display_key": "ussd.account_creation_prompt", "display_key": "ussd.kenya.account_creation_prompt",
"name": "account_creation_prompt", "name": "account_creation_prompt",
"parent": null "parent": null
}, },
"5": { "5": {
"description": "Entry point for activated users.", "description": "Entry point for activated users.",
"display_key": "ussd.start", "display_key": "ussd.kenya.start",
"name": "start", "name": "start",
"parent": null "parent": null
}, },
"6": { "6": {
"description": "Given name entry menu.", "description": "Given name entry menu.",
"display_key": "ussd.enter_given_name", "display_key": "ussd.kenya.enter_given_name",
"name": "enter_given_name", "name": "enter_given_name",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"7": { "7": {
"description": "Family name entry menu.", "description": "Family name entry menu.",
"display_key": "ussd.enter_family_name", "display_key": "ussd.kenya.enter_family_name",
"name": "enter_family_name", "name": "enter_family_name",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"8": { "8": {
"description": "Gender entry menu.", "description": "Gender entry menu.",
"display_key": "ussd.enter_gender", "display_key": "ussd.kenya.enter_gender",
"name": "enter_gender", "name": "enter_gender",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"9": { "9": {
"description": "Age entry menu.", "description": "Age entry menu.",
"display_key": "ussd.enter_gender", "display_key": "ussd.kenya.enter_gender",
"name": "enter_gender", "name": "enter_gender",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"10": { "10": {
"description": "Location entry menu.", "description": "Location entry menu.",
"display_key": "ussd.enter_location", "display_key": "ussd.kenya.enter_location",
"name": "enter_location", "name": "enter_location",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"11": { "11": {
"description": "Products entry menu.", "description": "Products entry menu.",
"display_key": "ussd.enter_products", "display_key": "ussd.kenya.enter_products",
"name": "enter_products", "name": "enter_products",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"12": { "12": {
"description": "Entry point for activated users.", "description": "Entry point for activated users.",
"display_key": "ussd.start", "display_key": "ussd.kenya.start",
"name": "start", "name": "start",
"parent": null "parent": null
}, },
"13": { "13": {
"description": "Send Token recipient entry.", "description": "Send Token recipient entry.",
"display_key": "ussd.enter_transaction_recipient", "display_key": "ussd.kenya.enter_transaction_recipient",
"name": "enter_transaction_recipient", "name": "enter_transaction_recipient",
"parent": "start" "parent": "start"
}, },
"14": { "14": {
"description": "Send Token amount prompt menu.", "description": "Send Token amount prompt menu.",
"display_key": "ussd.enter_transaction_amount", "display_key": "ussd.kenya.enter_transaction_amount",
"name": "enter_transaction_amount", "name": "enter_transaction_amount",
"parent": "start" "parent": "start"
}, },
"15": { "15": {
"description": "Pin entry for authorization to send token.", "description": "Pin entry for authorization to send token.",
"display_key": "ussd.transaction_pin_authorization", "display_key": "ussd.kenya.transaction_pin_authorization",
"name": "transaction_pin_authorization", "name": "transaction_pin_authorization",
"parent": "start" "parent": "start"
}, },
"16": { "16": {
"description": "Manage account menu.", "description": "Manage account menu.",
"display_key": "ussd.account_management", "display_key": "ussd.kenya.account_management",
"name": "account_management", "name": "account_management",
"parent": "start" "parent": "start"
}, },
"17": { "17": {
"description": "Manage metadata menu.", "description": "Manage metadata menu.",
"display_key": "ussd.metadata_management", "display_key": "ussd.kenya.metadata_management",
"name": "metadata_management", "name": "metadata_management",
"parent": "start" "parent": "start"
}, },
"18": { "18": {
"description": "Manage user's preferred language menu.", "description": "Manage user's preferred language menu.",
"display_key": "ussd.select_preferred_language", "display_key": "ussd.kenya.select_preferred_language",
"name": "select_preferred_language", "name": "select_preferred_language",
"parent": "account_management" "parent": "account_management"
}, },
"19": { "19": {
"description": "Retrieve mini-statement menu.", "description": "Retrieve mini-statement menu.",
"display_key": "ussd.mini_statement_pin_authorization", "display_key": "ussd.kenya.mini_statement_pin_authorization",
"name": "mini_statement_pin_authorization", "name": "mini_statement_pin_authorization",
"parent": "account_management" "parent": "account_management"
}, },
"20": { "20": {
"description": "Manage user's pin menu.", "description": "Manage user's pin menu.",
"display_key": "ussd.enter_current_pin", "display_key": "ussd.kenya.enter_current_pin",
"name": "enter_current_pin", "name": "enter_current_pin",
"parent": "account_management" "parent": "account_management"
}, },
"21": { "21": {
"description": "New pin entry menu.", "description": "New pin entry menu.",
"display_key": "ussd.enter_new_pin", "display_key": "ussd.kenya.enter_new_pin",
"name": "enter_new_pin", "name": "enter_new_pin",
"parent": "account_management" "parent": "account_management"
}, },
"22": { "22": {
"description": "Pin entry menu.", "description": "Pin entry menu.",
"display_key": "ussd.display_metadata_pin_authorization", "display_key": "ussd.kenya.display_metadata_pin_authorization",
"name": "display_metadata_pin_authorization", "name": "display_metadata_pin_authorization",
"parent": "start" "parent": "start"
}, },
"23": { "23": {
"description": "Exit menu.", "description": "Exit menu.",
"display_key": "ussd.exit", "display_key": "ussd.kenya.exit",
"name": "exit", "name": "exit",
"parent": null "parent": null
}, },
"24": { "24": {
"description": "Invalid menu option.", "description": "Invalid menu option.",
"display_key": "ussd.exit_invalid_menu_option", "display_key": "ussd.kenya.exit_invalid_menu_option",
"name": "exit_invalid_menu_option", "name": "exit_invalid_menu_option",
"parent": null "parent": null
}, },
"25": { "25": {
"description": "Pin policy violation.", "description": "Pin policy violation.",
"display_key": "ussd.exit_invalid_pin", "display_key": "ussd.kenya.exit_invalid_pin",
"name": "exit_invalid_pin", "name": "exit_invalid_pin",
"parent": null "parent": null
}, },
"26": { "26": {
"description": "Pin mismatch. New pin and the new pin confirmation do not match", "description": "Pin mismatch. New pin and the new pin confirmation do not match",
"display_key": "ussd.exit_pin_mismatch", "display_key": "ussd.kenya.exit_pin_mismatch",
"name": "exit_pin_mismatch", "name": "exit_pin_mismatch",
"parent": null "parent": null
}, },
"27": { "27": {
"description": "Ussd pin blocked Menu", "description": "Ussd pin blocked Menu",
"display_key": "ussd.exit_pin_blocked", "display_key": "ussd.kenya.exit_pin_blocked",
"name": "exit_pin_blocked", "name": "exit_pin_blocked",
"parent": null "parent": null
}, },
"28": { "28": {
"description": "Key params missing in request.", "description": "Key params missing in request.",
"display_key": "ussd.exit_invalid_request", "display_key": "ussd.kenya.exit_invalid_request",
"name": "exit_invalid_request", "name": "exit_invalid_request",
"parent": null "parent": null
}, },
"29": { "29": {
"description": "The user did not select a choice.", "description": "The user did not select a choice.",
"display_key": "ussd.exit_invalid_input", "display_key": "ussd.kenya.exit_invalid_input",
"name": "exit_invalid_input", "name": "exit_invalid_input",
"parent": null "parent": null
}, },
"30": { "30": {
"description": "Exit following unsuccessful transaction due to insufficient account balance.", "description": "Exit following unsuccessful transaction due to insufficient account balance.",
"display_key": "ussd.exit_insufficient_balance", "display_key": "ussd.kenya.exit_insufficient_balance",
"name": "exit_insufficient_balance", "name": "exit_insufficient_balance",
"parent": null "parent": null
}, },
"31": { "31": {
"description": "Exit following a successful transaction.", "description": "Exit following a successful transaction.",
"display_key": "ussd.exit_successful_transaction", "display_key": "ussd.kenya.exit_successful_transaction",
"name": "exit_successful_transaction", "name": "exit_successful_transaction",
"parent": null "parent": null
}, },
"32": { "32": {
"description": "End of a menu flow.", "description": "End of a menu flow.",
"display_key": "ussd.complete", "display_key": "ussd.kenya.complete",
"name": "complete", "name": "complete",
"parent": null "parent": null
}, },
"33": { "33": {
"description": "Pin entry menu to view account balances.", "description": "Pin entry menu to view account balances.",
"display_key": "ussd.account_balances_pin_authorization", "display_key": "ussd.kenya.account_balances_pin_authorization",
"name": "account_balances_pin_authorization", "name": "account_balances_pin_authorization",
"parent": "account_management" "parent": "account_management"
}, },
"34": { "34": {
"description": "Pin entry menu to view account statement.", "description": "Pin entry menu to view account statement.",
"display_key": "ussd.account_statement_pin_authorization", "display_key": "ussd.kenya.account_statement_pin_authorization",
"name": "account_statement_pin_authorization", "name": "account_statement_pin_authorization",
"parent": "account_management" "parent": "account_management"
}, },
"35": { "35": {
"description": "Menu to display account balances.", "description": "Menu to display account balances.",
"display_key": "ussd.account_balances", "display_key": "ussd.kenya.account_balances",
"name": "account_balances", "name": "account_balances",
"parent": "account_management" "parent": "account_management"
}, },
"36": { "36": {
"description": "Menu to display first set of transactions in statement.", "description": "Menu to display first set of transactions in statement.",
"display_key": "ussd.first_transaction_set", "display_key": "ussd.kenya.first_transaction_set",
"name": "first_transaction_set", "name": "first_transaction_set",
"parent": "account_management" "parent": null
}, },
"37": { "37": {
"description": "Menu to display middle set of transactions in statement.", "description": "Menu to display middle set of transactions in statement.",
"display_key": "ussd.middle_transaction_set", "display_key": "ussd.kenya.middle_transaction_set",
"name": "middle_transaction_set", "name": "middle_transaction_set",
"parent": null "parent": null
}, },
"38": { "38": {
"description": "Menu to display last set of transactions in statement.", "description": "Menu to display last set of transactions in statement.",
"display_key": "ussd.last_transaction_set", "display_key": "ussd.kenya.last_transaction_set",
"name": "last_transaction_set", "name": "last_transaction_set",
"parent": null "parent": null
}, },
"39": { "39": {
"description": "Menu to instruct users to call the office.", "description": "Menu to instruct users to call the office.",
"display_key": "ussd.help", "display_key": "ussd.kenya.help",
"name": "help", "name": "help",
"parent": null "parent": null
}, },
"40": { "40": {
"description": "Menu to display a user's entire profile", "description": "Menu to display a user's entire profile",
"display_key": "ussd.display_user_metadata", "display_key": "ussd.kenya.display_user_metadata",
"name": "display_user_metadata", "name": "display_user_metadata",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"41": { "41": {
"description": "The recipient is not in the system", "description": "The recipient is not in the system",
"display_key": "ussd.exit_invalid_recipient", "display_key": "ussd.kenya.exit_invalid_recipient",
"name": "exit_invalid_recipient", "name": "exit_invalid_recipient",
"parent": null "parent": null
}, },
"42": { "42": {
"description": "Pin entry menu for changing name data.", "description": "Pin entry menu for changing name data.",
"display_key": "ussd.name_edit_pin_authorization", "display_key": "ussd.kenya.name_edit_pin_authorization",
"name": "name_edit_pin_authorization", "name": "name_edit_pin_authorization",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"43": { "43": {
"description": "Pin entry menu for changing gender data.", "description": "Pin entry menu for changing gender data.",
"display_key": "ussd.gender_edit_pin_authorization", "display_key": "ussd.kenya.gender_edit_pin_authorization",
"name": "gender_edit_pin_authorization", "name": "gender_edit_pin_authorization",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"44": { "44": {
"description": "Pin entry menu for changing location data.", "description": "Pin entry menu for changing location data.",
"display_key": "ussd.location_edit_pin_authorization", "display_key": "ussd.kenya.location_edit_pin_authorization",
"name": "location_edit_pin_authorization", "name": "location_edit_pin_authorization",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"45": { "45": {
"description": "Pin entry menu for changing products data.", "description": "Pin entry menu for changing products data.",
"display_key": "ussd.products_edit_pin_authorization", "display_key": "ussd.kenya.products_edit_pin_authorization",
"name": "products_edit_pin_authorization", "name": "products_edit_pin_authorization",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"46": { "46": {
"description": "Pin confirmation for pin change.", "description": "Pin confirmation for pin change.",
"display_key": "ussd.new_pin_confirmation", "display_key": "ussd.kenya.new_pin_confirmation",
"name": "new_pin_confirmation", "name": "new_pin_confirmation",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"47": { "47": {
"description": "Year of birth entry menu.", "description": "Year of birth entry menu.",
"display_key": "ussd.enter_date_of_birth", "display_key": "ussd.kenya.enter_date_of_birth",
"name": "enter_date_of_birth", "name": "enter_date_of_birth",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"48": { "48": {
"description": "Pin entry menu for changing year of birth data.", "description": "Pin entry menu for changing year of birth data.",
"display_key": "ussd.dob_edit_pin_authorization", "display_key": "ussd.kenya.dob_edit_pin_authorization",
"name": "dob_edit_pin_authorization", "name": "dob_edit_pin_authorization",
"parent": "metadata_management" "parent": "metadata_management"
}, },
"49": { "49": {
"description": "Menu to display first set of tokens in the account's token list.", "description": "Menu to display first set of tokens in the account's token list.",
"display_key": "ussd.first_account_tokens_set", "display_key": "ussd.kenya.first_account_tokens_set",
"name": "first_account_tokens_set", "name": "first_account_tokens_set",
"parent": "start" "parent": null
}, },
"50": { "50": {
"description": "Menu to display middle set of tokens in the account's token list.", "description": "Menu to display middle set of tokens in the account's token list.",
"display_key": "ussd.middle_account_tokens_set", "display_key": "ussd.kenya.middle_account_tokens_set",
"name": "middle_account_tokens_set", "name": "middle_account_tokens_set",
"parent": null "parent": null
}, },
"51": { "51": {
"description": "Menu to display last set of tokens in the account's token list.", "description": "Menu to display last set of tokens in the account's token list.",
"display_key": "ussd.last_account_tokens_set", "display_key": "ussd.kenya.last_account_tokens_set",
"name": "last_account_tokens_set", "name": "last_account_tokens_set",
"parent": null "parent": null
}, },
"52": { "52": {
"description": "Pin entry menu for setting an active token.", "description": "Pin entry menu for setting an active token.",
"display_key": "ussd.token_selection_pin_authorization", "display_key": "ussd.kenya.token_selection_pin_authorization",
"name": "token_selection_pin_authorization", "name": "token_selection_pin_authorization",
"parent": "first_account_tokens_set" "parent": null
}, },
"53": { "53": {
"description": "Exit following a successful active token setting.", "description": "Exit following a successful active token setting.",
"display_key": "ussd.exit_successful_token_selection", "display_key": "ussd.kenya.exit_successful_token_selection",
"name": "exit_successful_token_selection", "name": "exit_successful_token_selection",
"parent": null "parent": null
}, },
"54": { "54": {
"description": "Pin management menu for operations related to an account's pin.", "description": "Pin management menu for operations related to an account's pin.",
"display_key": "ussd.pin_management", "display_key": "ussd.kenya.pin_management",
"name": "pin_management", "name": "pin_management",
"parent": "start" "parent": "start"
}, },
"55": { "55": {
"description": "Phone number entry for account whose pin is being reset.", "description": "Phone number entry for account whose pin is being reset.",
"display_key": "ussd.reset_guarded_pin", "display_key": "ussd.kenya.reset_guarded_pin",
"name": "reset_guarded_pin", "name": "reset_guarded_pin",
"parent": "pin_management" "parent": "pin_management"
}, },
"56": { "56": {
"description": "Pin entry for initiating request to reset an account's pin.", "description": "Pin entry for initiating request to reset an account's pin.",
"display_key": "ussd.reset_guarded_pin_authorization", "display_key": "ussd.kenya.reset_guarded_pin_authorization",
"name": "reset_guarded_pin_authorization", "name": "reset_guarded_pin_authorization",
"parent": "pin_management" "parent": "pin_management"
}, },
"57": { "57": {
"description": "Exit menu following successful pin reset initiation.", "description": "Exit menu following successful pin reset initiation.",
"display_key": "ussd.exit_pin_reset_initiated_success", "display_key": "ussd.kenya.exit_pin_reset_initiated_success",
"name": "exit_pin_reset_initiated_success", "name": "exit_pin_reset_initiated_success",
"parent": "pin_management" "parent": "pin_management"
}, },
"58": { "58": {
"description": "Exit menu in the event that an account is not a set guardian.", "description": "Exit menu in the event that an account is not a set guardian.",
"display_key": "ussd.exit_not_authorized_for_pin_reset", "display_key": "ussd.kenya.exit_not_authorized_for_pin_reset",
"name": "exit_not_authorized_for_pin_reset", "name": "exit_not_authorized_for_pin_reset",
"parent": "pin_management" "parent": "pin_management"
}, },
"59": { "59": {
"description": "Pin guard menu for handling guardianship operations.", "description": "Pin guard menu for handling guardianship operations.",
"display_key": "ussd.guard_pin", "display_key": "ussd.kenya.guard_pin",
"name": "guard_pin", "name": "guard_pin",
"parent": "pin_management" "parent": "pin_management"
}, },
"60": { "60": {
"description": "Pin entry to display a list of set guardians.", "description": "Pin entry to display a list of set guardians.",
"display_key": "ussd.guardian_list_pin_authorization", "display_key": "ussd.kenya.guardian_list_pin_authorization",
"name": "guardian_list_pin_authorization", "name": "guardian_list_pin_authorization",
"parent": "guard_pin" "parent": "guard_pin"
}, },
"61": { "61": {
"description": "Menu to display list of set guardians.", "description": "Menu to display list of set guardians.",
"display_key": "ussd.guardian_list", "display_key": "ussd.kenya.guardian_list",
"name": "guardian_list", "name": "guardian_list",
"parent": "guard_pin" "parent": "guard_pin"
}, },
"62": { "62": {
"description": "Phone number entry to add an account as a guardian to reset pin.", "description": "Phone number entry to add an account as a guardian to reset pin.",
"display_key": "ussd.add_guardian", "display_key": "ussd.kenya.add_guardian",
"name": "add_guardian", "name": "add_guardian",
"parent": "guard_pin" "parent": "guard_pin"
}, },
"63": { "63": {
"description": "Pin entry to confirm addition of an account as a guardian.", "description": "Pin entry to confirm addition of an account as a guardian.",
"display_key": "ussd.add_guardian_pin_authorization", "display_key": "ussd.kenya.add_guardian_pin_authorization",
"name": "add_guardian_pin_authorization", "name": "add_guardian_pin_authorization",
"parent": "guard_pin" "parent": "guard_pin"
}, },
"64": { "64": {
"description": "Exit menu when an account is successfully added as pin reset guardian.", "description": "Exit menu when an account is successfully added as pin reset guardian.",
"display_key": "ussd.exit_guardian_addition_success", "display_key": "ussd.kenya.exit_guardian_addition_success",
"name": "exit_guardian_addition_success", "name": "exit_guardian_addition_success",
"parent": "guard_pin" "parent": "guard_pin"
}, },
"65": { "65": {
"description": "Phone number entry to remove an account as a guardian to reset pin.", "description": "Phone number entry to remove an account as a guardian to reset pin.",
"display_key": "ussd.remove_guardian", "display_key": "ussd.kenya.remove_guardian",
"name": "remove_guardian", "name": "remove_guardian",
"parent": "guard_pin" "parent": "guard_pin"
}, },
"66": { "66": {
"description": "Pin entry to confirm removal of an account as a guardian.", "description": "Pin entry to confirm removal of an account as a guardian.",
"display_key": "ussd.remove_guardian_pin_authorization", "display_key": "ussd.kenya.remove_guardian_pin_authorization",
"name": "remove_guardian_pin_authorization", "name": "remove_guardian_pin_authorization",
"parent": "guard_pin" "parent": "guard_pin"
}, },
"67": { "67": {
"description": "Exit menu when an account is successfully removed as pin reset guardian.", "description": "Exit menu when an account is successfully removed as pin reset guardian.",
"display_key": "ussd.exit_guardian_removal_success", "display_key": "ussd.kenya.exit_guardian_removal_success",
"name": "exit_guardian_removal_success", "name": "exit_guardian_removal_success",
"parent": "guard_pin" "parent": "guard_pin"
}, },
"68": { "68": {
"description": "Exit menu when invalid phone number entry for guardian addition.", "description": "Exit menu when invalid phone number entry for guardian addition. ",
"display_key": "ussd.exit_invalid_guardian_addition", "display_key": "ussd.kenya.exit_invalid_guardian_addition",
"name": "exit_invalid_guardian_addition", "name": "exit_invalid_guardian_addition",
"parent": "guard_pin" "parent": "guard_pin"
}, },
"69": { "69": {
"description": "Exit menu when invalid phone number entry for guardian removal.", "description": "Exit menu when invalid phone number entry for guardian removal. ",
"display_key": "ussd.exit_invalid_guardian_removal", "display_key": "ussd.kenya.exit_invalid_guardian_removal",
"name": "exit_invalid_guardian_removal", "name": "exit_invalid_guardian_removal",
"parent": "guard_pin" "parent": "guard_pin"
},
"70": {
"description": "Menu to display middle set of languages to select.",
"display_key": "ussd.initial_middle_language_set",
"name": "initial_middle_language_set",
"parent": null
},
"71": {
"description": "Menu to display last set of languages to select.",
"display_key": "ussd.initial_last_language_set",
"name": "initial_last_language_set",
"parent": null
},
"72": {
"description": "Menu to display middle set of languages to select.",
"display_key": "ussd.middle_language_set",
"name": "middle_language_set",
"parent": null
},
"73": {
"description": "Menu to display last set of languages to select.",
"display_key": "ussd.last_language_set",
"name": "last_language_set",
"parent": null
} }
} }
} }

View File

@ -52,5 +52,4 @@ class UnknownUssdRecipient(Exception):
"""Raised when a recipient of a transaction is not known to the ussd application.""" """Raised when a recipient of a transaction is not known to the ussd application."""
class MaxRetryReached(Exception):
"""Raised when the maximum number of retries defined for polling for the availability of a resource."""

View File

@ -7,4 +7,3 @@ from .custom import CustomMetadata
from .person import PersonMetadata from .person import PersonMetadata
from .phone import PhonePointerMetadata from .phone import PhonePointerMetadata
from .preferences import PreferencesMetadata from .preferences import PreferencesMetadata
from .tokens import TokenMetadata

View File

@ -19,34 +19,34 @@ from cic_ussd.account.metadata import get_cached_preferred_language
from cic_ussd.account.statement import ( from cic_ussd.account.statement import (
get_cached_statement, get_cached_statement,
parse_statement_transactions, parse_statement_transactions,
query_statement) query_statement,
statement_transaction_set
)
from cic_ussd.account.tokens import (create_account_tokens_list, from cic_ussd.account.tokens import (create_account_tokens_list,
get_active_token_symbol, get_active_token_symbol,
get_cached_token_data, get_cached_token_data,
get_cached_token_symbol_list, get_cached_token_symbol_list,
get_cached_token_data_list, get_cached_token_data_list,
parse_token_list) parse_token_list,
token_list_set)
from cic_ussd.account.transaction import from_wei, to_wei from cic_ussd.account.transaction import from_wei, to_wei
from cic_ussd.cache import cache_data_key, cache_data, get_cached_data from cic_ussd.cache import cache_data_key, cache_data
from cic_ussd.db.models.account import Account from cic_ussd.db.models.account import Account
from cic_ussd.metadata import PersonMetadata from cic_ussd.metadata import PersonMetadata
from cic_ussd.phone_number import Support from cic_ussd.phone_number import Support
from cic_ussd.processor.poller import wait_for_session_data from cic_ussd.processor.util import parse_person_metadata
from cic_ussd.processor.util import parse_person_metadata, ussd_menu_list
from cic_ussd.session.ussd_session import save_session_data from cic_ussd.session.ussd_session import save_session_data
from cic_ussd.state_machine.logic.language import preferred_langauge_from_selection
from cic_ussd.translation import translation_for from cic_ussd.translation import translation_for
from sqlalchemy.orm.session import Session from sqlalchemy.orm.session import Session
logg = logging.getLogger(__file__) logg = logging.getLogger(__name__)
class MenuProcessor: class MenuProcessor:
def __init__(self, account: Account, display_key: str, menu_name: str, session: Session, ussd_session: dict): def __init__(self, account: Account, display_key: str, menu_name: str, session: Session, ussd_session: dict):
self.account = account self.account = account
self.display_key = display_key self.display_key = display_key
if account: self.identifier = bytes.fromhex(self.account.blockchain_address)
self.identifier = bytes.fromhex(self.account.blockchain_address)
self.menu_name = menu_name self.menu_name = menu_name
self.session = session self.session = session
self.ussd_session = ussd_session self.ussd_session = ussd_session
@ -89,29 +89,36 @@ class MenuProcessor:
:rtype: :rtype:
""" """
cached_statement = get_cached_statement(self.account.blockchain_address) cached_statement = get_cached_statement(self.account.blockchain_address)
transaction_sets = []
if cached_statement:
statement = json.loads(cached_statement)
statement_transactions = parse_statement_transactions(statement)
transaction_sets = [statement_transactions[tx:tx + 3] for tx in range(0, len(statement_transactions), 3)]
preferred_language = get_cached_preferred_language(self.account.blockchain_address) preferred_language = get_cached_preferred_language(self.account.blockchain_address)
if not preferred_language: if not preferred_language:
preferred_language = i18n.config.get('fallback') preferred_language = i18n.config.get('fallback')
no_transaction_history = statement_transaction_set(preferred_language, transaction_sets)
first_transaction_set = no_transaction_history
middle_transaction_set = no_transaction_history
last_transaction_set = no_transaction_history
if transaction_sets:
first_transaction_set = statement_transaction_set(preferred_language, transaction_sets[0])
if len(transaction_sets) >= 2:
middle_transaction_set = statement_transaction_set(preferred_language, transaction_sets[1])
if len(transaction_sets) >= 3:
last_transaction_set = statement_transaction_set(preferred_language, transaction_sets[2])
statement_list = [] if self.display_key == 'ussd.kenya.first_transaction_set':
if cached_statement:
statement_list = parse_statement_transactions(statement=json.loads(cached_statement))
fallback = translation_for('helpers.no_transaction_history', preferred_language)
transaction_sets = ussd_menu_list(fallback=fallback, menu_list=statement_list, split=3)
if self.display_key == 'ussd.first_transaction_set':
return translation_for( return translation_for(
self.display_key, preferred_language, first_transaction_set=transaction_sets[0] self.display_key, preferred_language, first_transaction_set=first_transaction_set
) )
if self.display_key == 'ussd.middle_transaction_set': if self.display_key == 'ussd.kenya.middle_transaction_set':
return translation_for( return translation_for(
self.display_key, preferred_language, middle_transaction_set=transaction_sets[1] self.display_key, preferred_language, middle_transaction_set=middle_transaction_set
) )
if self.display_key == 'ussd.last_transaction_set': if self.display_key == 'ussd.kenya.last_transaction_set':
return translation_for( return translation_for(
self.display_key, preferred_language, last_transaction_set=transaction_sets[2] self.display_key, preferred_language, last_transaction_set=last_transaction_set
) )
def add_guardian_pin_authorization(self): def add_guardian_pin_authorization(self):
@ -122,7 +129,7 @@ class MenuProcessor:
preferred_language = get_cached_preferred_language(self.account.blockchain_address) preferred_language = get_cached_preferred_language(self.account.blockchain_address)
if not preferred_language: if not preferred_language:
preferred_language = i18n.config.get('fallback') preferred_language = i18n.config.get('fallback')
set_guardians = self.account.get_guardians()[:3] set_guardians = self.account.get_guardians()
if set_guardians: if set_guardians:
guardians_list = '' guardians_list = ''
guardians_list_header = translation_for('helpers.guardians_list_header', preferred_language) guardians_list_header = translation_for('helpers.guardians_list_header', preferred_language)
@ -138,30 +145,36 @@ class MenuProcessor:
def account_tokens(self) -> str: def account_tokens(self) -> str:
cached_token_data_list = get_cached_token_data_list(self.account.blockchain_address) cached_token_data_list = get_cached_token_data_list(self.account.blockchain_address)
token_data_list = parse_token_list(cached_token_data_list) token_data_list = parse_token_list(cached_token_data_list)
token_list_sets = [token_data_list[tds:tds + 3] for tds in range(0, len(token_data_list), 3)]
preferred_language = get_cached_preferred_language(self.account.blockchain_address) preferred_language = get_cached_preferred_language(self.account.blockchain_address)
if not preferred_language: if not preferred_language:
preferred_language = i18n.config.get('fallback') preferred_language = i18n.config.get('fallback')
no_token_list = token_list_set(preferred_language, [])
first_account_tokens_set = no_token_list
middle_account_tokens_set = no_token_list
last_account_tokens_set = no_token_list
if token_list_sets:
data = {
'account_tokens_list': cached_token_data_list
}
save_session_data(data=data, queue='cic-ussd', session=self.session, ussd_session=self.ussd_session)
first_account_tokens_set = token_list_set(preferred_language, token_list_sets[0])
fallback = translation_for('helpers.no_tokens_list', preferred_language) if len(token_list_sets) >= 2:
token_list_sets = ussd_menu_list(fallback=fallback, menu_list=token_data_list, split=3) middle_account_tokens_set = token_list_set(preferred_language, token_list_sets[1])
if len(token_list_sets) >= 3:
data = { last_account_tokens_set = token_list_set(preferred_language, token_list_sets[2])
'account_tokens_list': cached_token_data_list if self.display_key == 'ussd.kenya.first_account_tokens_set':
}
save_session_data(data=data, queue='cic-ussd', session=self.session, ussd_session=self.ussd_session)
if self.display_key == 'ussd.first_account_tokens_set':
return translation_for( return translation_for(
self.display_key, preferred_language, first_account_tokens_set=token_list_sets[0] self.display_key, preferred_language, first_account_tokens_set=first_account_tokens_set
) )
if self.display_key == 'ussd.middle_account_tokens_set': if self.display_key == 'ussd.kenya.middle_account_tokens_set':
return translation_for( return translation_for(
self.display_key, preferred_language, middle_account_tokens_set=token_list_sets[1] self.display_key, preferred_language, middle_account_tokens_set=middle_account_tokens_set
) )
if self.display_key == 'ussd.last_account_tokens_set': if self.display_key == 'ussd.kenya.last_account_tokens_set':
return translation_for( return translation_for(
self.display_key, preferred_language, last_account_tokens_set=token_list_sets[2] self.display_key, preferred_language, last_account_tokens_set=last_account_tokens_set
) )
def help(self) -> str: def help(self) -> str:
@ -209,7 +222,7 @@ class MenuProcessor:
remaining_attempts = 3 remaining_attempts = 3
remaining_attempts -= self.account.failed_pin_attempts remaining_attempts -= self.account.failed_pin_attempts
retry_pin_entry = translation_for( retry_pin_entry = translation_for(
'ussd.retry_pin_entry', preferred_language, remaining_attempts=remaining_attempts 'ussd.kenya.retry_pin_entry', preferred_language, remaining_attempts=remaining_attempts
) )
return translation_for( return translation_for(
f'{self.display_key}.retry', preferred_language, retry_pin_entry=retry_pin_entry f'{self.display_key}.retry', preferred_language, retry_pin_entry=retry_pin_entry
@ -225,38 +238,6 @@ class MenuProcessor:
guardian = Account.get_by_phone_number(guardian_phone_number, self.session) guardian = Account.get_by_phone_number(guardian_phone_number, self.session)
return guardian.standard_metadata_id() return guardian.standard_metadata_id()
def language(self):
key = cache_data_key('system:languages'.encode('utf-8'), MetadataPointer.NONE)
cached_system_languages = get_cached_data(key)
language_list: list = json.loads(cached_system_languages)
if self.account:
preferred_language = get_cached_preferred_language(self.account.blockchain_address)
else:
preferred_language = i18n.config.get('fallback')
fallback = translation_for('helpers.no_language_list', preferred_language)
language_list_sets = ussd_menu_list(fallback=fallback, menu_list=language_list, split=3)
if self.display_key in ['ussd.initial_language_selection', 'ussd.select_preferred_language']:
return translation_for(
self.display_key, preferred_language, first_language_set=language_list_sets[0]
)
if 'middle_language_set' in self.display_key:
return translation_for(
self.display_key, preferred_language, middle_language_set=language_list_sets[1]
)
if 'last_language_set' in self.display_key:
return translation_for(
self.display_key, preferred_language, last_language_set=language_list_sets[2]
)
def account_creation_prompt(self):
preferred_language = preferred_langauge_from_selection(self.ussd_session.get('user_input'))
return translation_for(self.display_key, preferred_language)
def reset_guarded_pin_authorization(self): def reset_guarded_pin_authorization(self):
guarded_account_information = self.guarded_account_metadata() guarded_account_information = self.guarded_account_metadata()
return self.pin_authorization(guarded_account_information=guarded_account_information) return self.pin_authorization(guarded_account_information=guarded_account_information)
@ -400,9 +381,8 @@ class MenuProcessor:
) )
def exit_invalid_menu_option(self): def exit_invalid_menu_option(self):
if self.account: preferred_language = get_cached_preferred_language(self.account.blockchain_address)
preferred_language = get_cached_preferred_language(self.account.blockchain_address) if not preferred_language:
else:
preferred_language = i18n.config.get('fallback') preferred_language = i18n.config.get('fallback')
return translation_for(self.display_key, preferred_language, support_phone=Support.phone_number) return translation_for(self.display_key, preferred_language, support_phone=Support.phone_number)
@ -410,7 +390,7 @@ class MenuProcessor:
preferred_language = get_cached_preferred_language(self.account.blockchain_address) preferred_language = get_cached_preferred_language(self.account.blockchain_address)
if not preferred_language: if not preferred_language:
preferred_language = i18n.config.get('fallback') preferred_language = i18n.config.get('fallback')
return translation_for('ussd.exit_pin_blocked', preferred_language, support_phone=Support.phone_number) return translation_for('ussd.kenya.exit_pin_blocked', preferred_language, support_phone=Support.phone_number)
def exit_successful_token_selection(self) -> str: def exit_successful_token_selection(self) -> str:
selected_token = self.ussd_session.get('data').get('selected_token') selected_token = self.ussd_session.get('data').get('selected_token')
@ -418,7 +398,7 @@ class MenuProcessor:
preferred_language = get_cached_preferred_language(self.account.blockchain_address) preferred_language = get_cached_preferred_language(self.account.blockchain_address)
if not preferred_language: if not preferred_language:
preferred_language = i18n.config.get('fallback') preferred_language = i18n.config.get('fallback')
return translation_for(self.display_key, preferred_language, token_symbol=token_symbol) return translation_for(self.display_key,preferred_language,token_symbol=token_symbol)
def exit_successful_transaction(self): def exit_successful_transaction(self):
""" """
@ -465,9 +445,6 @@ def response(account: Account, display_key: str, menu_name: str, session: Sessio
""" """
menu_processor = MenuProcessor(account, display_key, menu_name, session, ussd_session) menu_processor = MenuProcessor(account, display_key, menu_name, session, ussd_session)
if menu_name == 'account_creation_prompt':
return menu_processor.account_creation_prompt()
if menu_name == 'start': if menu_name == 'start':
return menu_processor.start_menu() return menu_processor.start_menu()
@ -525,9 +502,6 @@ def response(account: Account, display_key: str, menu_name: str, session: Sessio
if 'account_tokens_set' in menu_name: if 'account_tokens_set' in menu_name:
return menu_processor.account_tokens() return menu_processor.account_tokens()
if 'language' in menu_name:
return menu_processor.language()
if menu_name == 'display_user_metadata': if menu_name == 'display_user_metadata':
return menu_processor.person_metadata() return menu_processor.person_metadata()
@ -541,4 +515,5 @@ def response(account: Account, display_key: str, menu_name: str, session: Sessio
return menu_processor.exit_successful_token_selection() return menu_processor.exit_successful_token_selection()
preferred_language = get_cached_preferred_language(account.blockchain_address) preferred_language = get_cached_preferred_language(account.blockchain_address)
return translation_for(display_key, preferred_language) return translation_for(display_key, preferred_language)

View File

@ -1,104 +0,0 @@
# standard imports
import logging
import time
from queue import Queue
from typing import Callable, Dict, Optional, Tuple, Union
# external imports
from cic_types.condiments import MetadataPointer
# local imports
from cic_ussd.cache import cache_data_key, get_cached_data
from cic_ussd.error import MaxRetryReached
logg = logging.getLogger()
# adapted from https://github.com/justiniso/polling/blob/master/polling.py
# opted not to use the package to reduce dependency
def poller(args: Optional[Tuple],
           interval: int,
           kwargs: Optional[Dict],
           max_retry: int,
           target: Callable[..., Union[Dict, str]]):
    """Repeatedly invoke ``target`` until it yields a usable value.

    ``target`` is called with ``*args`` when ``args`` is truthy, else with
    ``**kwargs`` when ``kwargs`` is truthy, else with no arguments. A result
    counts as available when it is truthy or is exactly an empty dict; falsy
    results and exceptions raised by ``target`` are recorded and the call is
    retried after ``interval`` seconds, up to ``max_retry`` attempts.

    :param args: positional arguments passed to ``target``.
    :type args: Optional[Tuple]
    :param interval: seconds to sleep between attempts.
    :type interval: int
    :param kwargs: keyword arguments passed to ``target``; only consulted when ``args`` is falsy.
    :type kwargs: Optional[Dict]
    :param max_retry: maximum number of attempts before giving up.
    :type max_retry: int
    :param target: callable producing the polled resource.
    :type target: Callable[..., Union[Dict, str]]
    :raises MaxRetryReached: once ``max_retry`` attempts are exhausted, carrying
        the values collected along the way and the last observed value/error.
    """
    collected_values: list = []
    expected_value = None
    tries = 0
    while True:
        if tries >= max_retry:
            raise MaxRetryReached(collected_values, expected_value)
        try:
            if args:
                value = target(*args)
            elif kwargs:
                value = target(**kwargs)
            else:
                value = target()
            expected_value = value
        # BUG FIX: the original clause was `except () as error:` — an empty
        # exception tuple matches nothing, so the record-and-retry branch was
        # dead code and any exception from `target` escaped the poll loop.
        except Exception as error:
            expected_value = error
        else:
            # an empty dict is a legitimate "present" payload, hence the
            # explicit `== {}` escape hatch alongside plain truthiness.
            if bool(value) or value == {}:
                logg.debug(f'Resource: {expected_value} now available.')
                break
        collected_values.append(expected_value)
        logg.debug(f'Collected values are: {collected_values}')
        tries += 1
        time.sleep(interval)
def wait_for_cache(identifier: Union[list, bytes],
                   resource_name: str,
                   salt: MetadataPointer,
                   interval: int = 1,
                   max_retry: int = 5):
    """Block until the cache entry for ``identifier``/``salt`` becomes available.

    Builds the cache key and delegates the retry loop to :func:`poller`, which
    queries the cache every ``interval`` seconds at most ``max_retry`` times.

    :param identifier: cache key material identifying the cached resource.
    :type identifier: Union[list, bytes]
    :param interval: seconds between polling attempts.
    :type interval: int
    :param resource_name: human-readable resource name, used for logging only.
    :type resource_name: str
    :param salt: metadata pointer used to salt the cache key.
    :type salt: MetadataPointer
    :param max_retry: maximum number of polling attempts.
    :type max_retry: int
    :return: None; returns once the cached value is observed.
    :rtype: None
    :raises MaxRetryReached: if the entry never appears within the retry budget.
    """
    key: str = cache_data_key(identifier=identifier, salt=salt)
    logg.debug(f'Polling for resource: {resource_name} at: {key} every: {interval} second(s) for {max_retry} seconds.')
    poller(args=(key,), interval=interval, kwargs=None, max_retry=max_retry, target=get_cached_data)
def wait_for_session_data(resource_name: str,
                          session_data_key: str,
                          ussd_session: dict,
                          interval: int = 1,
                          max_retry: int = 5):
    """Block until ``session_data_key`` appears in a ussd session's data.

    Polls in two phases via :func:`poller`: first until the session exposes a
    'data' element at all, then until that element contains
    ``session_data_key``. Each phase retries every ``interval`` seconds, up to
    ``max_retry`` times.

    :param interval: seconds between polling attempts (applied per phase).
    :type interval: int
    :param resource_name: human-readable resource name, used for logging only.
    :type resource_name: str
    :param session_data_key: key expected inside the session's 'data' dict.
    :type session_data_key: str
    :param ussd_session: ussd session record being watched.
    :type ussd_session: dict
    :param max_retry: maximum number of polling attempts (applied per phase).
    :type max_retry: int
    :return: None; returns once the session data element is observed.
    :rtype: None
    :raises MaxRetryReached: if either phase exhausts its retry budget.
    """
    # poll for data element first
    logg.debug(f'Data poller with max retry at: {max_retry}. Checking for every: {interval} seconds.')
    poller(args=('data',), interval=interval, kwargs=None, max_retry=max_retry, target=ussd_session.get)
    # poll for session data element
    get_session_data = ussd_session.get('data').get
    logg.debug(f'Session data poller for: {resource_name} with max retry at: {max_retry}. Checking for every: {interval} seconds.')
    poller(args=(session_data_key,), interval=interval, kwargs=None, max_retry=max_retry, target=get_session_data)

View File

@ -8,7 +8,7 @@ from sqlalchemy.orm.session import Session
from tinydb.table import Document from tinydb.table import Document
# local imports # local imports
from cic_ussd.db.models.account import Account from cic_ussd.db.models.account import Account, create
from cic_ussd.db.models.base import SessionBase from cic_ussd.db.models.base import SessionBase
from cic_ussd.db.models.ussd_session import UssdSession from cic_ussd.db.models.ussd_session import UssdSession
from cic_ussd.menu.ussd_menu import UssdMenu from cic_ussd.menu.ussd_menu import UssdMenu
@ -16,6 +16,7 @@ from cic_ussd.processor.menu import response
from cic_ussd.processor.util import latest_input, resume_last_ussd_session from cic_ussd.processor.util import latest_input, resume_last_ussd_session
from cic_ussd.session.ussd_session import create_or_update_session, persist_ussd_session from cic_ussd.session.ussd_session import create_or_update_session, persist_ussd_session
from cic_ussd.state_machine import UssdStateMachine from cic_ussd.state_machine import UssdStateMachine
from cic_ussd.translation import translation_for
from cic_ussd.validator import is_valid_response from cic_ussd.validator import is_valid_response
@ -35,6 +36,9 @@ def handle_menu(account: Account, session: Session) -> Document:
last_ussd_session = UssdSession.last_ussd_session(account.phone_number, session) last_ussd_session = UssdSession.last_ussd_session(account.phone_number, session)
if last_ussd_session: if last_ussd_session:
return resume_last_ussd_session(last_ussd_session.state) return resume_last_ussd_session(last_ussd_session.state)
elif not account.has_preferred_language():
return UssdMenu.find_by_name('initial_language_selection')
else: else:
return UssdMenu.find_by_name('initial_pin_entry') return UssdMenu.find_by_name('initial_pin_entry')
@ -67,13 +71,16 @@ def get_menu(account: Account,
return UssdMenu.find_by_name(state) return UssdMenu.find_by_name(state)
def handle_menu_operations(external_session_id: str, def handle_menu_operations(chain_str: str,
external_session_id: str,
phone_number: str, phone_number: str,
queue: str, queue: str,
service_code: str, service_code: str,
session, session,
user_input: str): user_input: str):
""" """
:param chain_str:
:type chain_str:
:param external_session_id: :param external_session_id:
:type external_session_id: :type external_session_id:
:param phone_number: :param phone_number:
@ -93,38 +100,10 @@ def handle_menu_operations(external_session_id: str,
account: Account = Account.get_by_phone_number(phone_number, session) account: Account = Account.get_by_phone_number(phone_number, session)
if account: if account:
return handle_account_menu_operations(account, external_session_id, queue, session, service_code, user_input) return handle_account_menu_operations(account, external_session_id, queue, session, service_code, user_input)
else: create(chain_str, phone_number, session)
return handle_no_account_menu_operations( menu = UssdMenu.find_by_name('account_creation_prompt')
account, external_session_id, phone_number, queue, session, service_code, user_input) preferred_language = i18n.config.get('fallback')
create_or_update_session(
def handle_no_account_menu_operations(account: Optional[Account],
external_session_id: str,
phone_number: str,
queue: str,
session: Session,
service_code: str,
user_input: str):
"""
:param account:
:type account:
:param external_session_id:
:type external_session_id:
:param phone_number:
:type phone_number:
:param queue:
:type queue:
:param session:
:type session:
:param service_code:
:type service_code:
:param user_input:
:type user_input:
:return:
:rtype:
"""
menu = UssdMenu.find_by_name('initial_language_selection')
ussd_session = create_or_update_session(
external_session_id=external_session_id, external_session_id=external_session_id,
msisdn=phone_number, msisdn=phone_number,
service_code=service_code, service_code=service_code,
@ -132,20 +111,7 @@ def handle_no_account_menu_operations(account: Optional[Account],
session=session, session=session,
user_input=user_input) user_input=user_input)
persist_ussd_session(external_session_id, queue) persist_ussd_session(external_session_id, queue)
last_ussd_session: UssdSession = UssdSession.last_ussd_session(phone_number, session) return translation_for('ussd.kenya.account_creation_prompt', preferred_language)
if last_ussd_session:
if not user_input:
menu = resume_last_ussd_session(last_ussd_session.state)
else:
session = SessionBase.bind_session(session)
state = next_state(account, session, user_input, last_ussd_session.to_json())
menu = UssdMenu.find_by_name(state)
return response(account=account,
display_key=menu.get('display_key'),
menu_name=menu.get('name'),
session=session,
ussd_session=ussd_session.to_json())
def handle_account_menu_operations(account: Account, def handle_account_menu_operations(account: Account,
@ -186,12 +152,15 @@ def handle_account_menu_operations(account: Account,
if last_ussd_session: if last_ussd_session:
ussd_session = create_or_update_session( ussd_session = create_or_update_session(
external_session_id, phone_number, service_code, user_input, menu.get('name'), session, external_session_id, phone_number, service_code, user_input, menu.get('name'), session,
last_ussd_session.data) last_ussd_session.data
)
else: else:
ussd_session = create_or_update_session( ussd_session = create_or_update_session(
external_session_id, phone_number, service_code, user_input, menu.get('name'), session, {}) external_session_id, phone_number, service_code, user_input, menu.get('name'), session, None
)
menu_response = response( menu_response = response(
account, menu.get('display_key'), menu.get('name'), session, ussd_session.to_json()) account, menu.get('display_key'), menu.get('name'), session, ussd_session.to_json()
)
if not is_valid_response(menu_response): if not is_valid_response(menu_response):
raise ValueError(f'Invalid response: {response}') raise ValueError(f'Invalid response: {response}')
persist_ussd_session(external_session_id, queue) persist_ussd_session(external_session_id, queue)

Some files were not shown because too many files have changed in this diff Show More