Compare commits

...

113 Commits

Author SHA1 Message Date
nolash
274222c44c Add chain interface 2021-07-02 11:58:28 +02:00
nolash
59c422c5e7 WIP fix traffic script imports 2021-07-02 11:37:08 +02:00
Louis Holbrook
a075c55957 Merge branch 'lash/allowance' into 'master'
Add allowance check and transferFrom task

See merge request grassrootseconomics/cic-internal-integration!203
2021-06-30 18:15:40 +00:00
Louis Holbrook
6464f651ec Add allowance check and transferFrom task 2021-06-30 18:15:40 +00:00
Louis Holbrook
5145282946 Merge branch 'lash/faucet-verif' into 'master'
Consider faucet amount in verify balance

See merge request grassrootseconomics/cic-internal-integration!206
2021-06-30 18:14:53 +00:00
nolash
1e87f2ed31 Consider faucet amount in verify balance 2021-06-30 17:22:26 +02:00
Louis Holbrook
c852f41d76 Merge branch 'lash/move-to-chainlib-eth' into 'master'
Move to chainlib-eth dependency

See merge request grassrootseconomics/cic-internal-integration!199
2021-06-30 14:44:18 +00:00
Louis Holbrook
f8e68cff96 Move to chainlib-eth dependency 2021-06-30 14:44:17 +00:00
7027d77836 Merge branch 'philip/refactor-integration-tests' into 'master'
Philip/refactor integration tests

See merge request grassrootseconomics/cic-internal-integration!182
2021-06-30 14:27:56 +00:00
d356f8167d Philip/refactor integration tests 2021-06-30 14:27:56 +00:00
753d21fe95 Merge branch 'bvander/data-seeding-build-improvements' into 'master'
uses the new base image

See merge request grassrootseconomics/cic-internal-integration!204
2021-06-29 17:17:18 +00:00
3b6e031746 Merge branch 'philip/db-session-management' into 'master'
Philip/db session management

Closes cic-ussd#53

See merge request grassrootseconomics/cic-internal-integration!193
2021-06-29 10:49:25 +00:00
b1d5d45eef Philip/db session management 2021-06-29 10:49:25 +00:00
53317cb912 Merge branch 'philip/notify-queue-setting' into 'master'
Enable api level setting of queue value.

See merge request grassrootseconomics/cic-internal-integration!184
2021-06-29 10:21:43 +00:00
18382a1f35 Merge branch 'bvander/integration-notes' into 'master'
integration updates: meta imports

See merge request grassrootseconomics/cic-internal-integration!201
2021-06-28 22:40:54 +00:00
29e91fafab integration updates: meta imports 2021-06-28 22:40:54 +00:00
5b20a9a24a Merge branch 'bvander/better-data-seeding-container' into 'master'
add key store and vars

See merge request grassrootseconomics/cic-internal-integration!202
2021-06-28 22:40:04 +00:00
a252195bdc uses the new base image 2021-06-28 10:56:15 -07:00
Spencer Ofwiti
f1be3b633c Merge branch 'lash/downgrade-chainsyncer' into 'master'
Downgrade chainsyncer

See merge request grassrootseconomics/cic-internal-integration!200
2021-06-28 15:36:56 +00:00
nolash
e59a71188c Downgrade chainsyncer 2021-06-28 17:18:52 +02:00
1d0eb06f2f Merge branch 'bvander/data-seeding-keystore' into 'master'
add keystore

See merge request grassrootseconomics/cic-internal-integration!197
2021-06-26 17:12:46 +00:00
57127132b5 add keystore 2021-06-26 09:50:58 -07:00
0bf2c35fcd add key store and vars 2021-06-26 09:50:03 -07:00
d046595764 Merge branch 'philip/remove-hardcoded-import-script-configs' into 'master'
Refactors hardcoded config vars.

See merge request grassrootseconomics/cic-internal-integration!196
2021-06-26 15:58:07 +00:00
9dd7ec88fd Refactors hardcoded config vars. 2021-06-26 18:51:25 +03:00
282fd2ff52 Merge branch 'bvander/better-data-seeding-container' into 'master'
fix paths for data seeding container

See merge request grassrootseconomics/cic-internal-integration!195
2021-06-25 23:06:25 +00:00
8f85598861 fix paths and stuff 2021-06-25 15:59:10 -07:00
8529c349ca Merge branch 'philip/fix-import-script-pointers' into 'master'
Philip/fix import script pointers

See merge request grassrootseconomics/cic-internal-integration!194
2021-06-25 15:36:06 +00:00
4368d2bf59 Philip/fix import script pointers 2021-06-25 15:36:05 +00:00
da3c812bf5 Merge branch 'philip/add-age-metadata' into 'master'
Philip/add age metadata

See merge request grassrootseconomics/cic-internal-integration!186
2021-06-23 13:25:09 +00:00
82b1e87462 Philip/add age metadata 2021-06-23 13:25:09 +00:00
e13c423daf Merge branch 'philip/custom-metadata' into 'master'
Philip/custom metadata

See merge request grassrootseconomics/cic-internal-integration!192
2021-06-23 08:54:34 +00:00
56b3bd751d Philip/custom metadata 2021-06-23 08:54:34 +00:00
4f41c5bacf Merge branch 'philip/notify-sender-id-resolution' into 'master'
Handle empty string defaults in kubernetes secrets.

See merge request grassrootseconomics/cic-internal-integration!183
2021-06-23 07:02:22 +00:00
07583f0c3b Handle empty string defaults in kubernetes secrets. 2021-06-23 07:02:22 +00:00
0ae912082c Merge branch 'philip/refactor-phone-number-input-handling' into 'master'
Refactors handling of phone number inputs during transactions

See merge request grassrootseconomics/cic-internal-integration!185
2021-06-23 06:44:01 +00:00
094f4d4298 Refactors handling of phone number inputs during transactions 2021-06-23 06:44:01 +00:00
Spencer Ofwiti
9471b1d8ab Merge branch 'spencer/fix-import-scripts-build' into 'master'
Fix build process for import scripts.

See merge request grassrootseconomics/cic-internal-integration!188
2021-06-23 05:49:06 +00:00
Spencer Ofwiti
57100366d8 Fix build process for import scripts. 2021-06-23 05:49:06 +00:00
71e0973020 Merge branch 'lash/check-import-ussd' into 'master'
Rehabilitate ussd import scripts

Closes #57

See merge request grassrootseconomics/cic-internal-integration!166
2021-06-23 04:29:39 +00:00
Louis Holbrook
12ab5c2f66 Rehabilitate ussd import scripts 2021-06-23 04:29:38 +00:00
a804552620 Merge branch 'bvander/add-venv-to-ignore' into 'master'
add .venv folder

See merge request grassrootseconomics/cic-internal-integration!191
2021-06-22 21:35:21 +00:00
0319fa6076 add .venv folder 2021-06-22 14:26:41 -07:00
91dfc51d54 Merge branch 'bvander/change-meta-key-dir' into 'master'
change pgp key dir

See merge request grassrootseconomics/cic-internal-integration!190
2021-06-22 21:25:58 +00:00
4fd861f080 change pgp key dir 2021-06-22 14:15:38 -07:00
Louis Holbrook
28de7a4eac Merge branch 'lash/loglines' into 'master'
Loglines and dep bump

See merge request grassrootseconomics/cic-internal-integration!187
2021-06-19 06:49:42 +00:00
Louis Holbrook
a31e79b0f7 Loglines and dep bump 2021-06-19 06:49:42 +00:00
eb2f71aee0 Enable api level setting of queue value. 2021-06-14 10:38:23 +03:00
Spencer Ofwiti
e5b1352970 Merge branch 'spencer/meta-cicd' into 'master'
Refactor meta ci-cd pipeline.

See merge request grassrootseconomics/cic-internal-integration!171
2021-06-07 16:11:03 +00:00
Spencer Ofwiti
89b90da5d2 Refactor meta ci-cd pipeline. 2021-06-07 16:11:03 +00:00
9607994c31 Merge branch 'philip/add-support-phone-number' into 'master'
Philip/add support phone number

See merge request grassrootseconomics/cic-internal-integration!175
2021-06-07 08:02:03 +00:00
0da617d29e Philip/add support phone number 2021-06-07 08:02:03 +00:00
56bcad16a5 Merge branch 'philip/refactor-metadata-entry' into 'master'
Philip/refactor signup steps.

See merge request grassrootseconomics/cic-internal-integration!173
2021-06-07 07:47:03 +00:00
77d9936e39 Philip/refactor signup steps. 2021-06-07 07:47:02 +00:00
Louis Holbrook
72aeefc78b Merge branch 'lash/simple-compose' into 'master'
Simplify docker compose setup

See merge request grassrootseconomics/cic-internal-integration!180
2021-06-04 20:10:16 +00:00
nolash
fab9b0c520 Add long timeout to first account create in contract migration part 2 2021-06-04 22:00:06 +02:00
9566f8c8e2 Merge branch 'philip/qfix-vbumps' into 'master'
Bumps cic-eth and cic-base versions.

See merge request grassrootseconomics/cic-internal-integration!179
2021-06-04 08:34:14 +00:00
007d7a5121 Bumps cic-eth and cic-base versions. 2021-06-04 11:12:04 +03:00
fc20849aff Merge branch 'bvander/contract-migration-requirements' into 'master'
pull the req out of the container and bump em

See merge request grassrootseconomics/cic-internal-integration!178
2021-06-03 18:54:19 +00:00
1605e53216 pull the req out of the container and bump em 2021-06-03 11:43:13 -07:00
200fdf0e3c Merge branch 'no-host-mount-startup' into 'master'
no local file mounts for config files

See merge request grassrootseconomics/cic-internal-integration!177
2021-06-03 17:22:49 +00:00
022db04198 no local file mounts for config files 2021-06-03 17:22:47 +00:00
1c17048981 Update README.md 2021-06-03 17:19:55 +00:00
Louis Holbrook
04c0963f33 Merge branch 'lash/tmp-check-chaintool' into 'master'
Update queue/syncer module structure

See merge request grassrootseconomics/cic-internal-integration!176
2021-06-03 13:51:55 +00:00
Louis Holbrook
096ed9bc27 Update queue/syncer module structure 2021-06-03 13:51:55 +00:00
1a931eced4 Merge branch 'philip/management-integration-tests' into 'master'
Philip/management integration tests

See merge request grassrootseconomics/cic-internal-integration!159
2021-06-03 13:40:51 +00:00
ed9e032890 Philip/management integration tests 2021-06-03 13:40:51 +00:00
Louis Holbrook
69ae9b7c07 Merge branch 'lash/update-imports-readme' into 'master'
Bring import readme up-to-date

See merge request grassrootseconomics/cic-internal-integration!167
2021-06-02 17:55:46 +00:00
Louis Holbrook
634d3fb401 Bring import readme up-to-date 2021-06-02 17:55:46 +00:00
Louis Holbrook
65f722b291 Merge branch 'lash/stale-import-deps' into 'master'
Upgrade cic-eth package for imports

See merge request grassrootseconomics/cic-internal-integration!169
2021-06-02 16:09:40 +00:00
Louis Holbrook
0ad0f9981c Upgrade cic-eth package for imports 2021-06-02 16:09:39 +00:00
Louis Holbrook
5fb0f4a2e9 Merge branch 'lash/horse-cart' into 'master'
Fix false offset limit error

Closes #60

See merge request grassrootseconomics/cic-internal-integration!174
2021-06-02 15:27:30 +00:00
nolash
41a96b5584 Int comparisons on block numbers in cic cache lookup 2021-06-02 17:11:15 +02:00
Geoff Turk
d0f2bc0120 Merge branch 'geoffturk/mock-data' into 'master'
Add better mocked data

See merge request grassrootseconomics/cic-internal-integration!172
2021-06-02 14:59:00 +00:00
Geoff Turk
e2946052e0 Add more better mocked data 2021-06-02 14:53:53 +02:00
Louis Holbrook
546d69f1e9 Merge branch 'lash/coveralls-that-coverall' into 'master'
cic-eth: Reach 90% test coverage

Closes cic-eth#125

See merge request grassrootseconomics/cic-internal-integration!168
2021-05-31 15:34:17 +00:00
Louis Holbrook
fbf7351238 cic-eth: Reach 90% test coverage 2021-05-31 15:34:16 +00:00
Geoff Turk
b886384fa8 Merge branch 'geoffturk/fix-traffic' into 'master'
Fix traffic script

See merge request grassrootseconomics/cic-internal-integration!170
2021-05-27 10:38:08 +00:00
Geoff Turk
277033f3b5 Fix traffic script 2021-05-27 10:38:07 +00:00
4ae094fd30 Merge branch 'cic-eth-unittest' into 'master'
Unit tests for cic-eth

See merge request grassrootseconomics/cic-internal-integration!164
2021-05-25 16:22:26 +00:00
cb239f112a Unit tests for cic-eth 2021-05-25 16:22:26 +00:00
Geoff Turk
d971a6eded Merge branch 'fix-meta-data-seeding' into 'master'
Fix path to PGP exports directory

See merge request grassrootseconomics/cic-internal-integration!165
2021-05-21 16:42:21 +00:00
Spencer Ofwiti
b0a6df0177 Merge branch 'spencer/metadata-identifiers' into 'master'
Add meta update cli tool.

See merge request grassrootseconomics/cic-internal-integration!138
2021-05-21 09:42:08 +00:00
Spencer Ofwiti
92c9df4e19 Add meta update cli tool. 2021-05-21 09:42:08 +00:00
Geoff Turk
9c49d568e0 Fix path to PGP exports directory 2021-05-21 10:51:32 +02:00
Louis Holbrook
d7113f3923 Merge branch 'lash/rehabilitate-traffic-2' into 'master'
Rehabilitate traffic generator script

See merge request grassrootseconomics/cic-internal-integration!145
2021-05-20 21:25:15 +00:00
Louis Holbrook
c569fe4b17 Rehabilitate traffic generator script 2021-05-20 21:25:14 +00:00
1c650df27d Merge branch 'bvander/move-scripts-to-e2e-folder' into 'master'
move files out of scripts folder to their own dir

See merge request grassrootseconomics/cic-internal-integration!137
2021-05-20 14:31:08 +00:00
a31b7bc9cd move files out of scripts folder to their own dir 2021-05-20 14:31:08 +00:00
Geoff Turk
78ff58c1a2 Merge branch 'geoff/data-success' into 'master'
Add success field to transactions_all_data

See merge request grassrootseconomics/cic-internal-integration!163
2021-05-20 10:28:07 +00:00
1676addbeb Merge branch 'philip/meta-cluster-bug' into 'master'
Refactors to handle error in metadata handling

Closes cic-ussd#45

See merge request grassrootseconomics/cic-internal-integration!160
2021-05-19 16:25:10 +00:00
1efc25ac15 Refactors to handle error in metadata handling 2021-05-19 16:25:10 +00:00
Louis Holbrook
db2ec0dcfa Merge branch 'philip/notify-errors' into 'master'
Philip/notify errors

Closes cic-notify#4

See merge request grassrootseconomics/cic-internal-integration!161
2021-05-19 16:13:06 +00:00
5148e6428b Philip/notify errors 2021-05-19 16:13:06 +00:00
Louis Holbrook
0c186ed968 Merge branch 'lash/rehabilitate-tests-eth' into 'master'
Fix outdated module names in cic-eth tests

See merge request grassrootseconomics/cic-internal-integration!162
2021-05-19 15:11:08 +00:00
Louis Holbrook
c44439bd90 Fix outdated module names in cic-eth tests 2021-05-19 15:11:08 +00:00
Geoff Turk
0411603078 Add success field to transactions_all_data 2021-05-19 16:47:22 +02:00
eee895ea71 Merge branch 'willruddick-master-patch-91858' into 'master'
small updates. note other gender

See merge request grassrootseconomics/cic-internal-integration!154
2021-05-19 10:26:52 +00:00
Louis Holbrook
a5ca898532 Merge branch 'lash/update-contracts-in-migration-2' into 'master'
imports: Fix stale dep in sovereign import users script

See merge request grassrootseconomics/cic-internal-integration!149
2021-05-19 09:57:03 +00:00
Louis Holbrook
6d8508aebf imports: Fix stale dep in sovereign import users script 2021-05-19 09:57:02 +00:00
Louis Holbrook
f8f66984d2 Merge branch 'lash/no-ussd-contamination' into 'master'
Isolate ussd-related data files in imports

See merge request grassrootseconomics/cic-internal-integration!150
2021-05-19 09:55:24 +00:00
Louis Holbrook
0f02dd1b7c Isolate ussd-related data files in imports 2021-05-19 09:55:24 +00:00
63a4a82ab0 Merge branch 'philip/replicate-wills-changes' into 'master'
Replicates changes in broken MR by will.

See merge request grassrootseconomics/cic-internal-integration!158
2021-05-19 08:25:19 +00:00
949c1070a9 Replicates changes in broken MR by will. 2021-05-19 11:19:29 +03:00
5d9fbe9b64 Merge branch 'willruddick-master-patch-28332' into 'master'
shortened and changed service code

See merge request grassrootseconomics/cic-internal-integration!152
2021-05-19 07:47:09 +00:00
873a3f082a shortened and changed service code 2021-05-19 07:47:09 +00:00
7b408cf564 Merge branch 'willruddick-master-patch-28450' into 'master'
small changes, note the 'other' gender

See merge request grassrootseconomics/cic-internal-integration!151
2021-05-19 07:33:37 +00:00
Louis Holbrook
9dfbd7034c Merge branch 'lash/decimals-in-api' into 'master'
cic-eth-tasker: Add decimals and token name to default token api call return struct

Closes cic-eth#123

See merge request grassrootseconomics/cic-internal-integration!148
2021-05-19 06:59:43 +00:00
Louis Holbrook
235f5cede8 cic-eth-tasker: Add decimals and token name to default token api call return struct 2021-05-19 06:59:42 +00:00
Geoff Turk
0a59539f9a Merge branch 'lash/cache-data-api' into 'master'
cic-cache: Add data API

Closes cic-cache#11

See merge request grassrootseconomics/cic-internal-integration!157
2021-05-18 17:13:57 +00:00
Louis Holbrook
60b36945df cic-cache: Add data API 2021-05-18 17:13:57 +00:00
e8512ebbae small updates. note other gender 2021-05-17 11:27:45 +00:00
f2c955c60b small changes, note the 'other' gender 2021-05-17 11:06:10 +00:00
232 changed files with 14400 additions and 1927 deletions

5
.gitignore vendored
View File

@@ -8,3 +8,8 @@ gmon.out
*.egg-info *.egg-info
dist/ dist/
build/ build/
**/*sqlite
**/.nyc_output
**/coverage
**/.venv
.idea

View File

@@ -6,6 +6,7 @@ include:
- local: 'apps/cic-notify/.gitlab-ci.yml' - local: 'apps/cic-notify/.gitlab-ci.yml'
- local: 'apps/cic-meta/.gitlab-ci.yml' - local: 'apps/cic-meta/.gitlab-ci.yml'
- local: 'apps/cic-cache/.gitlab-ci.yml' - local: 'apps/cic-cache/.gitlab-ci.yml'
- local: 'apps/data-seeding/.gitlab-ci.yml'
stages: stages:
- build - build

View File

@@ -1,22 +1,28 @@
# standard imports # standard imports
import logging import logging
import datetime
# third-party imports # external imports
import moolb import moolb
# local imports # local imports
from cic_cache.db import list_transactions_mined from cic_cache.db.list import (
from cic_cache.db import list_transactions_account_mined list_transactions_mined,
list_transactions_account_mined,
list_transactions_mined_with_data,
)
logg = logging.getLogger() logg = logging.getLogger()
class BloomCache: class Cache:
def __init__(self, session): def __init__(self, session):
self.session = session self.session = session
class BloomCache(Cache):
@staticmethod @staticmethod
def __get_filter_size(n): def __get_filter_size(n):
n = 8192 * 8 n = 8192 * 8
@@ -87,3 +93,44 @@ class BloomCache:
f_blocktx.add(block + tx) f_blocktx.add(block + tx)
logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block)) logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),) return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
class DataCache(Cache):
def load_transactions_with_data(self, offset, end):
rows = list_transactions_mined_with_data(self.session, offset, end)
tx_cache = []
highest_block = -1;
lowest_block = -1;
date_is_str = None # stick this in startup
for r in rows:
if highest_block == -1:
highest_block = r['block_number']
lowest_block = r['block_number']
tx_type = 'unknown'
if r['value'] != None:
tx_type = '{}.{}'.format(r['domain'], r['value'])
if date_is_str == None:
date_is_str = type(r['date_block']).__name__ == 'str'
o = {
'block_number': r['block_number'],
'tx_hash': r['tx_hash'],
'date_block': r['date_block'],
'sender': r['sender'],
'recipient': r['recipient'],
'from_value': int(r['from_value']),
'to_value': int(r['to_value']),
'source_token': r['source_token'],
'destination_token': r['destination_token'],
'success': r['success'],
'tx_type': tx_type,
}
if date_is_str:
o['date_block'] = datetime.datetime.fromisoformat(r['date_block'])
tx_cache.append(o)
return (lowest_block, highest_block, tx_cache)

View File

@@ -28,6 +28,26 @@ def list_transactions_mined(
return r return r
def list_transactions_mined_with_data(
session,
offset,
end,
):
"""Executes db query to return all confirmed transactions according to the specified offset and limit.
:param offset: Offset in data set to return transactions from
:type offset: int
:param limit: Max number of transactions to retrieve
:type limit: int
:result: Result set
:rtype: SQLAlchemy.ResultProxy
"""
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number ASC, tx_index ASC".format(offset, end)
r = session.execute(s)
return r
def list_transactions_account_mined( def list_transactions_account_mined(
session, session,
address, address,

View File

@@ -0,0 +1,110 @@
# standard imports
import logging
import json
import re
import base64
# local imports
from cic_cache.cache import (
BloomCache,
DataCache,
)
logg = logging.getLogger(__name__)
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'
re_transactions_all_data = r'/txa/(\d+)/(\d+)/?'
DEFAULT_LIMIT = 100
def process_transactions_account_bloom(session, env):
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
if not r:
return None
address = r[1]
if r[2] == None:
address = '0x' + address
offset = DEFAULT_LIMIT
if r.lastindex > 2:
offset = r[3]
limit = 0
if r.lastindex > 3:
limit = r[4]
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
'filter_rounds': 3,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
def process_transactions_all_bloom(session, env):
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
if not r:
return None
offset = DEFAULT_LIMIT
if r.lastindex > 0:
offset = r[1]
limit = 0
if r.lastindex > 1:
limit = r[2]
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
'filter_rounds': 3,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
def process_transactions_all_data(session, env):
r = re.match(re_transactions_all_data, env.get('PATH_INFO'))
if not r:
return None
if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
return None
offset = r[1]
end = r[2]
if int(r[2]) < int(r[1]):
raise ValueError('cart before the horse, dude')
c = DataCache(session)
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, end)
for r in tx_cache:
r['date_block'] = r['date_block'].timestamp()
o = {
'low': lowest_block,
'high': highest_block,
'data': tx_cache,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)

View File

@@ -1,18 +1,20 @@
# standard imports # standard imports
import os import os
import re
import logging import logging
import argparse import argparse
import json
import base64 import base64
# third-party imports # external imports
import confini import confini
# local imports # local imports
from cic_cache import BloomCache
from cic_cache.db import dsn_from_config from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase from cic_cache.db.models.base import SessionBase
from cic_cache.runnable.daemons.query import (
process_transactions_account_bloom,
process_transactions_all_bloom,
process_transactions_all_data,
)
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
@@ -44,72 +46,6 @@ logg.debug('config:\n{}'.format(config))
dsn = dsn_from_config(config) dsn = dsn_from_config(config)
SessionBase.connect(dsn, config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'
DEFAULT_LIMIT = 100
def process_transactions_account_bloom(session, env):
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
if not r:
return None
address = r[1]
if r[2] == None:
address = '0x' + address
offset = DEFAULT_LIMIT
if r.lastindex > 2:
offset = r[3]
limit = 0
if r.lastindex > 3:
limit = r[4]
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
'filter_rounds': 3,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
def process_transactions_all_bloom(session, env):
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
if not r:
return None
offset = DEFAULT_LIMIT
if r.lastindex > 0:
offset = r[1]
limit = 0
if r.lastindex > 1:
limit = r[2]
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
'filter_rounds': 3,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
# uwsgi application # uwsgi application
def application(env, start_response): def application(env, start_response):
@@ -119,10 +55,16 @@ def application(env, start_response):
session = SessionBase.create_session() session = SessionBase.create_session()
for handler in [ for handler in [
process_transactions_all_data,
process_transactions_all_bloom, process_transactions_all_bloom,
process_transactions_account_bloom, process_transactions_account_bloom,
]: ]:
r = handler(session, env) r = None
try:
r = handler(session, env)
except ValueError as e:
start_response('400 {}'.format(str(e)))
return []
if r != None: if r != None:
(mime_type, content) = r (mime_type, content) = r
break break

View File

@@ -16,6 +16,7 @@ import cic_base.config
import cic_base.log import cic_base.log
import cic_base.argparse import cic_base.argparse
import cic_base.rpc import cic_base.rpc
from cic_base.eth.syncer import chain_interface
from cic_eth_registry import CICRegistry from cic_eth_registry import CICRegistry
from cic_eth_registry.error import UnknownContractError from cic_eth_registry.error import UnknownContractError
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
@@ -28,10 +29,8 @@ from hexathon import (
strip_0x, strip_0x,
) )
from chainsyncer.backend.sql import SQLBackend from chainsyncer.backend.sql import SQLBackend
from chainsyncer.driver import ( from chainsyncer.driver.head import HeadSyncer
HeadSyncer, from chainsyncer.driver.history import HistorySyncer
HistorySyncer,
)
from chainsyncer.db.models.base import SessionBase from chainsyncer.db.models.base import SessionBase
# local imports # local imports
@@ -113,10 +112,10 @@ def main():
logg.info('resuming sync session {}'.format(syncer_backend)) logg.info('resuming sync session {}'.format(syncer_backend))
for syncer_backend in syncer_backends: for syncer_backend in syncer_backends:
syncers.append(HistorySyncer(syncer_backend)) syncers.append(HistorySyncer(syncer_backend, chain_interface))
syncer_backend = SQLBackend.live(chain_spec, block_offset+1) syncer_backend = SQLBackend.live(chain_spec, block_offset+1)
syncers.append(HeadSyncer(syncer_backend)) syncers.append(HeadSyncer(syncer_backend, chain_interface))
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS') trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
if trusted_addresses_src == None: if trusted_addresses_src == None:

View File

@@ -0,0 +1,2 @@
[syncer]
loop_interval = 1

View File

@@ -1,12 +1,13 @@
cic-base~=0.1.2b10 cic-base==0.1.3a3+build.984b5cff
alembic==1.4.2 alembic==1.4.2
confini~=0.3.6rc3 confini~=0.3.6rc3
uwsgi==2.0.19.1 uwsgi==2.0.19.1
moolb~=0.1.0 moolb~=0.1.0
cic-eth-registry~=0.5.5a4 cic-eth-registry~=0.5.6a1
SQLAlchemy==1.3.20 SQLAlchemy==1.3.20
semver==2.13.0 semver==2.13.0
psycopg2==2.8.6 psycopg2==2.8.6
celery==4.4.7 celery==4.4.7
redis==3.5.3 redis==3.5.3
chainsyncer[sql]~=0.0.2a4 chainsyncer[sql]~=0.0.3a3
erc20-faucet~=0.2.2a1

View File

@@ -2,6 +2,7 @@
import os import os
import argparse import argparse
import logging import logging
import re
import alembic import alembic
from alembic.config import Config as AlembicConfig from alembic.config import Config as AlembicConfig
@@ -23,6 +24,8 @@ argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file') argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory') argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
argparser.add_argument('-f', action='store_true', help='force action')
argparser.add_argument('-v', action='store_true', help='be verbose') argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose') argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args() args = argparser.parse_args()
@@ -53,4 +56,10 @@ ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', dsn) ac.set_main_option('sqlalchemy.url', dsn)
ac.set_main_option('script_location', migrations_dir) ac.set_main_option('script_location', migrations_dir)
if args.reset:
if not args.f:
if not re.match(r'[yY][eE]?[sS]?', input('EEK! this will DELETE the existing db. are you sure??')):
logg.error('user chickened out on requested reset, bailing')
sys.exit(1)
alembic.command.downgrade(ac, 'base')
alembic.command.upgrade(ac, 'head') alembic.command.upgrade(ac, 'head')

View File

@@ -6,6 +6,5 @@ sqlparse==0.4.1
pytest-celery==0.0.0a1 pytest-celery==0.0.0a1
eth_tester==0.5.0b3 eth_tester==0.5.0b3
py-evm==0.3.0a20 py-evm==0.3.0a20
web3==5.12.2 cic_base[full]==0.1.3a3+build.984b5cff
cic-eth-registry~=0.5.5a3 sarafu-faucet~=0.0.4a1
cic-base[full]==0.1.2b8

View File

@@ -88,3 +88,16 @@ def txs(
tx_hash_first, tx_hash_first,
tx_hash_second, tx_hash_second,
] ]
@pytest.fixture(scope='function')
def tag_txs(
init_database,
txs,
):
db.add_tag(init_database, 'taag', domain='test')
init_database.commit()
db.tag_transaction(init_database, txs[1], 'taag', domain='test')

View File

@@ -0,0 +1,31 @@
# standard imports
import json
# external imports
import pytest
# local imports
from cic_cache.runnable.daemons.query import process_transactions_all_data
def test_api_all_data(
init_database,
txs,
):
env = {
'PATH_INFO': '/txa/410000/420000',
'HTTP_X_CIC_CACHE_MODE': 'all',
}
j = process_transactions_all_data(init_database, env)
o = json.loads(j[1])
assert len(o['data']) == 2
env = {
'PATH_INFO': '/txa/420000/410000',
'HTTP_X_CIC_CACHE_MODE': 'all',
}
with pytest.raises(ValueError):
j = process_transactions_all_data(init_database, env)

View File

@@ -9,6 +9,7 @@ import pytest
# local imports # local imports
from cic_cache import BloomCache from cic_cache import BloomCache
from cic_cache.cache import DataCache
logg = logging.getLogger() logg = logging.getLogger()
@@ -33,3 +34,23 @@ def test_cache(
assert b[0] == list_defaults['block'] - 1 assert b[0] == list_defaults['block'] - 1
def test_cache_data(
init_database,
list_defaults,
list_actors,
list_tokens,
txs,
tag_txs,
):
session = init_database
c = DataCache(session)
b = c.load_transactions_with_data(410000, 420000)
assert len(b[2]) == 2
assert b[2][0]['tx_hash'] == txs[1]
assert b[2][1]['tx_type'] == 'unknown'
assert b[2][0]['tx_type'] == 'test.taag'

View File

@@ -5,3 +5,5 @@ omit =
cic_eth/db/migrations/* cic_eth/db/migrations/*
cic_eth/sync/head.py cic_eth/sync/head.py
cic_eth/sync/mempool.py cic_eth/sync/mempool.py
cic_eth/queue/state.py
*redis*.py

View File

@@ -5,18 +5,29 @@
.cic_eth_changes_target: .cic_eth_changes_target:
rules: rules:
- changes: - if: $CI_PIPELINE_SOURCE == "merge_request_event"
- $CONTEXT/$APP_NAME/* #changes:
#- $CONTEXT/$APP_NAME/**/*
when: always
build-mr-cic-eth: build-mr-cic-eth:
extends: extends:
- .cic_eth_changes_target
- .py_build_merge_request
- .cic_eth_variables - .cic_eth_variables
- .cic_eth_changes_target
- .py_build_target_test
test-mr-cic-eth:
extends:
- .cic_eth_variables
- .cic_eth_changes_target
stage: test
image: $CI_REGISTRY_IMAGE/$APP_NAME-test:latest
script:
- cd apps/$APP_NAME/
- pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
needs: ["build-mr-cic-eth"]
build-push-cic-eth: build-push-cic-eth:
extends: extends:
- .py_build_push - .py_build_push
- .cic_eth_variables - .cic_eth_variables

2
apps/cic-eth/MANIFEST.in Normal file
View File

@@ -0,0 +1,2 @@
include *requirements.txt

View File

@@ -4,11 +4,18 @@ import logging
# external imports # external imports
import celery import celery
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.tx import unpack from chainlib.connection import RPCConnection
from chainqueue.query import get_tx from chainlib.eth.tx import (
from chainqueue.state import set_cancel unpack,
TxFactory,
)
from chainlib.eth.gas import OverrideGasOracle
from chainqueue.sql.query import get_tx
from chainqueue.sql.state import set_cancel
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
from chainqueue.db.models.tx import TxCache from chainqueue.db.models.tx import TxCache
from hexathon import strip_0x
from potaahto.symbols import snake_and_camel
# local imports # local imports
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
@@ -21,13 +28,14 @@ from cic_eth.admin.ctrl import (
) )
from cic_eth.queue.tx import queue_create from cic_eth.queue.tx import queue_create
from cic_eth.eth.gas import create_check_gas_task from cic_eth.eth.gas import create_check_gas_task
from cic_eth.task import BaseTask
celery_app = celery.current_app celery_app = celery.current_app
logg = logging.getLogger() logg = logging.getLogger()
@celery_app.task(bind=True) @celery_app.task(bind=True, base=BaseTask)
def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1): def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
"""Shift all transactions with nonces higher than the offset by the provided position delta. """Shift all transactions with nonces higher than the offset by the provided position delta.
Transactions who are replaced by transactions that move nonces will be marked as OVERRIDDEN. Transactions who are replaced by transactions that move nonces will be marked as OVERRIDDEN.
@@ -38,25 +46,29 @@ def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1):
:type tx_hash_orig_hex: str, 0x-hex :type tx_hash_orig_hex: str, 0x-hex
:param delta: Amount :param delta: Amount
""" """
chain_spec = ChainSpec.from_dict(chainspec_dict)
rpc = RPCConnection.connect(chain_spec, 'default')
rpc_signer = RPCConnection.connect(chain_spec, 'signer')
queue = None queue = None
try: try:
queue = self.request.delivery_info.get('routing_key') queue = self.request.delivery_info.get('routing_key')
except AttributeError: except AttributeError:
pass pass
chain_spec = ChainSpec.from_chain_str(chain_str) session = BaseTask.session_func()
tx_brief = get_tx(tx_hash_orig_hex) tx_brief = get_tx(chain_spec, tx_hash_orig_hex, session=session)
tx_raw = bytes.fromhex(strip_0x(tx_brief['signed_tx'][2:])) tx_raw = bytes.fromhex(strip_0x(tx_brief['signed_tx']))
tx = unpack(tx_raw, chain_spec) tx = unpack(tx_raw, chain_spec)
nonce = tx_brief['nonce'] nonce = tx_brief['nonce']
address = tx['from'] address = tx['from']
logg.debug('shifting nonce {} position(s) for address {}, offset {}'.format(delta, address, nonce)) logg.debug('shifting nonce {} position(s) for address {}, offset {}, hash {}'.format(delta, address, nonce, tx['hash']))
lock_queue(None, chain_str, address) lock_queue(None, chain_spec.asdict(), address=address)
lock_send(None, chain_str, address) lock_send(None, chain_spec.asdict(), address=address)
set_cancel(chain_spec, strip_0x(tx['hash']), manual=True, session=session)
session = SessionBase.create_session()
q = session.query(Otx) q = session.query(Otx)
q = q.join(TxCache) q = q.join(TxCache)
q = q.filter(TxCache.sender==address) q = q.filter(TxCache.sender==address)
@@ -69,49 +81,57 @@ def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1):
for otx in otxs: for otx in otxs:
tx_raw = bytes.fromhex(strip_0x(otx.signed_tx)) tx_raw = bytes.fromhex(strip_0x(otx.signed_tx))
tx_new = unpack(tx_raw, chain_spec) tx_new = unpack(tx_raw, chain_spec)
tx_new = snake_and_camel(tx_new)
tx_previous_hash_hex = tx_new['hash'] tx_previous_hash_hex = tx_new['hash']
tx_previous_nonce = tx_new['nonce'] tx_previous_nonce = tx_new['nonce']
del(tx_new['hash']) tx_new['gas_price'] += 1
del(tx_new['hash_unsigned']) tx_new['gasPrice'] = tx_new['gas_price']
tx_new['nonce'] -= delta tx_new['nonce'] -= delta
(tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx_new, chain_str) logg.debug('tx_new {}'.format(tx_new))
del(tx_new['hash'])
del(tx_new['hash_unsigned'])
del(tx_new['hashUnsigned'])
gas_oracle = OverrideGasOracle(limit=tx_new['gas'], price=tx_new['gas_price'] + 1) # TODO: it should be possible to merely set this price here and if missing in the existing struct then fill it in (chainlib.eth.tx)
c = TxFactory(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.build_raw(tx_new)
logg.debug('tx {} -> {} nonce {} -> {}'.format(tx_previous_hash_hex, tx_hash_hex, tx_previous_nonce, tx_new['nonce'])) logg.debug('tx {} -> {} nonce {} -> {}'.format(tx_previous_hash_hex, tx_hash_hex, tx_previous_nonce, tx_new['nonce']))
otx = Otx( otx = Otx(
nonce=tx_new['nonce'], tx_new['nonce'],
address=tx_new['from'], tx_hash_hex,
tx_hash=tx_hash_hex, tx_signed_raw_hex,
signed_tx=tx_signed_raw_hex, )
)
session.add(otx) session.add(otx)
session.commit()
# TODO: cancel all first, then replace. Otherwise we risk two non-locked states for two different nonces. # TODO: cancel all first, then replace. Otherwise we risk two non-locked states for two different nonces.
set_cancel(tx_previous_hash_hex, True) set_cancel(chain_spec, strip_0x(tx_previous_hash_hex), manual=True, session=session)
TxCache.clone(tx_previous_hash_hex, tx_hash_hex) TxCache.clone(tx_previous_hash_hex, tx_hash_hex, session=session)
tx_hashes.append(tx_hash_hex) tx_hashes.append(tx_hash_hex)
txs.append(tx_signed_raw_hex) txs.append(tx_signed_raw_hex)
session.commit()
session.close() session.close()
s = create_check_gas_and_send_task( s = create_check_gas_task(
txs, txs,
chain_str, chain_spec,
tx_new['from'], tx_new['from'],
tx_new['gas'], gas=tx_new['gas'],
tx_hashes, tx_hashes_hex=tx_hashes,
queue, queue=queue,
) )
s_unlock_send = celery.signature( s_unlock_send = celery.signature(
'cic_eth.admin.ctrl.unlock_send', 'cic_eth.admin.ctrl.unlock_send',
[ [
chain_str, chain_spec.asdict(),
tx_new['from'], tx_new['from'],
], ],
queue=queue, queue=queue,
@@ -119,7 +139,7 @@ def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1):
s_unlock_direct = celery.signature( s_unlock_direct = celery.signature(
'cic_eth.admin.ctrl.unlock_queue', 'cic_eth.admin.ctrl.unlock_queue',
[ [
chain_str, chain_spec.asdict(),
tx_new['from'], tx_new['from'],
], ],
queue=queue, queue=queue,

View File

@@ -16,4 +16,6 @@ def default_token(self):
return { return {
'symbol': self.default_token_symbol, 'symbol': self.default_token_symbol,
'address': self.default_token_address, 'address': self.default_token_address,
'name': self.default_token_name,
'decimals': self.default_token_decimals,
} }

View File

@@ -8,6 +8,7 @@ from chainlib.eth.constant import (
ZERO_ADDRESS, ZERO_ADDRESS,
) )
from cic_eth_registry import CICRegistry from cic_eth_registry import CICRegistry
from cic_eth_registry.erc20 import ERC20Token
from cic_eth_registry.error import UnknownContractError from cic_eth_registry.error import UnknownContractError
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from chainlib.eth.contract import code from chainlib.eth.contract import code
@@ -30,13 +31,14 @@ from chainqueue.db.enum import (
status_str, status_str,
) )
from chainqueue.error import TxStateChangeError from chainqueue.error import TxStateChangeError
from chainqueue.sql.query import get_tx
from eth_erc20 import ERC20
# local imports # local imports
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
from cic_eth.db.models.role import AccountRole from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.nonce import Nonce from cic_eth.db.models.nonce import Nonce
from cic_eth.error import InitializationError from cic_eth.error import InitializationError
from cic_eth.queue.query import get_tx
app = celery.current_app app = celery.current_app
@@ -188,6 +190,7 @@ class AdminApi:
s_manual = celery.signature( s_manual = celery.signature(
'cic_eth.queue.state.set_manual', 'cic_eth.queue.state.set_manual',
[ [
chain_spec.asdict(),
tx_hash_hex, tx_hash_hex,
], ],
queue=self.queue, queue=self.queue,
@@ -206,8 +209,9 @@ class AdminApi:
s.link(s_gas) s.link(s_gas)
return s_manual.apply_async() return s_manual.apply_async()
def check_nonce(self, address):
def check_nonce(self, chain_spec, address):
s = celery.signature( s = celery.signature(
'cic_eth.queue.query.get_account_tx', 'cic_eth.queue.query.get_account_tx',
[ [
@@ -228,13 +232,12 @@ class AdminApi:
s_get_tx = celery.signature( s_get_tx = celery.signature(
'cic_eth.queue.query.get_tx', 'cic_eth.queue.query.get_tx',
[ [
chain_spec.asdict(), chain_spec.asdict(),
k, k,
], ],
queue=self.queue, queue=self.queue,
) )
tx = s_get_tx.apply_async().get() tx = s_get_tx.apply_async().get()
#tx = get_tx(k)
logg.debug('checking nonce {} (previous {})'.format(tx['nonce'], last_nonce)) logg.debug('checking nonce {} (previous {})'.format(tx['nonce'], last_nonce))
nonce_otx = tx['nonce'] nonce_otx = tx['nonce']
if not is_alive(tx['status']) and tx['status'] & local_fail > 0: if not is_alive(tx['status']) and tx['status'] & local_fail > 0:
@@ -242,7 +245,9 @@ class AdminApi:
blocking_tx = k blocking_tx = k
blocking_nonce = nonce_otx blocking_nonce = nonce_otx
elif nonce_otx - last_nonce > 1: elif nonce_otx - last_nonce > 1:
logg.error('nonce gap; {} followed {} for account {}'.format(nonce_otx, last_nonce, tx['from'])) logg.debug('tx {}'.format(tx))
tx_obj = unpack(bytes.fromhex(strip_0x(tx['signed_tx'])), chain_spec)
logg.error('nonce gap; {} followed {} for account {}'.format(nonce_otx, last_nonce, tx_obj['from']))
blocking_tx = k blocking_tx = k
blocking_nonce = nonce_otx blocking_nonce = nonce_otx
break break
@@ -256,12 +261,13 @@ class AdminApi:
'blocking': blocking_nonce, 'blocking': blocking_nonce,
}, },
'tx': { 'tx': {
'blocking': blocking_tx, 'blocking': add_0x(blocking_tx),
}
} }
}
def fix_nonce(self, address, nonce, chain_spec): # TODO: is risky since it does not validate that there is actually a nonce problem?
def fix_nonce(self, chain_spec, address, nonce):
s = celery.signature( s = celery.signature(
'cic_eth.queue.query.get_account_tx', 'cic_eth.queue.query.get_account_tx',
[ [
@@ -275,15 +281,17 @@ class AdminApi:
txs = s.apply_async().get() txs = s.apply_async().get()
tx_hash_hex = None tx_hash_hex = None
session = SessionBase.create_session()
for k in txs.keys(): for k in txs.keys():
tx_dict = get_tx(k) tx_dict = get_tx(chain_spec, k, session=session)
if tx_dict['nonce'] == nonce: if tx_dict['nonce'] == nonce:
tx_hash_hex = k tx_hash_hex = k
session.close()
s_nonce = celery.signature( s_nonce = celery.signature(
'cic_eth.admin.nonce.shift_nonce', 'cic_eth.admin.nonce.shift_nonce',
[ [
self.rpc.chain_spec.asdict(), chain_spec.asdict(),
tx_hash_hex, tx_hash_hex,
], ],
queue=self.queue queue=self.queue
@@ -388,12 +396,13 @@ class AdminApi:
t = s.apply_async() t = s.apply_async()
tx = t.get() tx = t.get()
source_token = None source_token = None
if tx['source_token'] != ZERO_ADDRESS: if tx['source_token'] != ZERO_ADDRESS:
source_token_declaration = None
if registry != None: if registry != None:
try: try:
source_token = registry.by_address(tx['source_token']) source_token_declaration = registry.by_address(tx['source_token'], sender_address=self.call_address)
except UnknownContractError: except UnknownContractError:
logg.warning('unknown source token contract {} (direct)'.format(tx['source_token'])) logg.warning('unknown source token contract {} (direct)'.format(tx['source_token']))
else: else:
@@ -406,16 +415,21 @@ class AdminApi:
queue=self.queue queue=self.queue
) )
t = s.apply_async() t = s.apply_async()
source_token = t.get() source_token_declaration = t.get()
if source_token == None:
logg.warning('unknown source token contract {} (task pool)'.format(tx['source_token'])) if source_token_declaration != None:
logg.warning('found declarator record for source token {} but not checking validity'.format(tx['source_token']))
source_token = ERC20Token(chain_spec, self.rpc, tx['source_token'])
logg.debug('source token set tup {}'.format(source_token))
destination_token = None destination_token = None
if tx['destination_token'] != ZERO_ADDRESS: if tx['destination_token'] != ZERO_ADDRESS:
destination_token_declaration = None
if registry != None: if registry != None:
try: try:
destination_token = registry.by_address(tx['destination_token']) destination_token_declaration = registry.by_address(tx['destination_token'], sender_address=self.call_address)
except UnknownContractError: except UnknownContractError:
logg.warning('unknown destination token contract {}'.format(tx['destination_token'])) logg.warning('unknown destination token contract {}'.format(tx['destination_token']))
else: else:
@@ -428,10 +442,10 @@ class AdminApi:
queue=self.queue queue=self.queue
) )
t = s.apply_async() t = s.apply_async()
destination_token = t.get() destination_token_declaration = t.get()
if destination_token == None: if destination_token_declaration != None:
logg.warning('unknown destination token contract {} (task pool)'.format(tx['destination_token'])) logg.warning('found declarator record for destination token {} but not checking validity'.format(tx['destination_token']))
destination_token = ERC20Token(chain_spec, self.rpc, tx['destination_token'])
tx['sender_description'] = 'Custodial account' tx['sender_description'] = 'Custodial account'
tx['recipient_description'] = 'Custodial account' tx['recipient_description'] = 'Custodial account'
@@ -543,13 +557,19 @@ class AdminApi:
if role != None: if role != None:
tx['recipient_description'] = role tx['recipient_description'] = role
erc20_c = ERC20(chain_spec)
if source_token != None: if source_token != None:
tx['source_token_symbol'] = source_token.symbol() tx['source_token_symbol'] = source_token.symbol
tx['sender_token_balance'] = source_token.function('balanceOf')(tx['sender']).call() o = erc20_c.balance_of(tx['source_token'], tx['sender'], sender_address=self.call_address)
r = self.rpc.do(o)
tx['sender_token_balance'] = erc20_c.parse_balance(r)
if destination_token != None: if destination_token != None:
tx['destination_token_symbol'] = destination_token.symbol() tx['destination_token_symbol'] = destination_token.symbol
tx['recipient_token_balance'] = source_token.function('balanceOf')(tx['recipient']).call() o = erc20_c.balance_of(tx['destination_token'], tx['recipient'], sender_address=self.call_address)
r = self.rpc.do(o)
tx['recipient_token_balance'] = erc20_c.parse_balance(r)
#tx['recipient_token_balance'] = destination_token.function('balanceOf')(tx['recipient']).call()
# TODO: this can mean either not subitted or culled, need to check other txs with same nonce to determine which # TODO: this can mean either not subitted or culled, need to check other txs with same nonce to determine which
tx['network_status'] = 'Not in node' tx['network_status'] = 'Not in node'

View File

@@ -74,29 +74,156 @@ class Api:
return s_token.apply_async() return s_token.apply_async()
def convert_transfer(self, from_address, to_address, target_return, minimum_return, from_token_symbol, to_token_symbol): # def convert_transfer(self, from_address, to_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
"""Executes a chain of celery tasks that performs conversion between two ERC20 tokens, and transfers to a specified receipient after convert has completed. # """Executes a chain of celery tasks that performs conversion between two ERC20 tokens, and transfers to a specified receipient after convert has completed.
#
# :param from_address: Ethereum address of sender
# :type from_address: str, 0x-hex
# :param to_address: Ethereum address of receipient
# :type to_address: str, 0x-hex
# :param target_return: Estimated return from conversion
# :type target_return: int
# :param minimum_return: The least value of destination token return to allow
# :type minimum_return: int
# :param from_token_symbol: ERC20 token symbol of token being converted
# :type from_token_symbol: str
# :param to_token_symbol: ERC20 token symbol of token to receive
# :type to_token_symbol: str
# :returns: uuid of root task
# :rtype: celery.Task
# """
# raise NotImplementedError('out of service until new DEX migration is done')
# s_check = celery.signature(
# 'cic_eth.admin.ctrl.check_lock',
# [
# [from_token_symbol, to_token_symbol],
# self.chain_spec.asdict(),
# LockEnum.QUEUE,
# from_address,
# ],
# queue=self.queue,
# )
# s_nonce = celery.signature(
# 'cic_eth.eth.nonce.reserve_nonce',
# [
# self.chain_spec.asdict(),
# ],
# queue=self.queue,
# )
# s_tokens = celery.signature(
# 'cic_eth.eth.erc20.resolve_tokens_by_symbol',
# [
# self.chain_str,
# ],
# queue=self.queue,
# )
# s_convert = celery.signature(
# 'cic_eth.eth.bancor.convert_with_default_reserve',
# [
# from_address,
# target_return,
# minimum_return,
# to_address,
# self.chain_spec.asdict(),
# ],
# queue=self.queue,
# )
# s_nonce.link(s_tokens)
# s_check.link(s_nonce)
# if self.callback_param != None:
# s_convert.link(self.callback_success)
# s_tokens.link(s_convert).on_error(self.callback_error)
# else:
# s_tokens.link(s_convert)
#
# t = s_check.apply_async(queue=self.queue)
# return t
#
#
# def convert(self, from_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
# """Executes a chain of celery tasks that performs conversion between two ERC20 tokens.
#
# :param from_address: Ethereum address of sender
# :type from_address: str, 0x-hex
# :param target_return: Estimated return from conversion
# :type target_return: int
# :param minimum_return: The least value of destination token return to allow
# :type minimum_return: int
# :param from_token_symbol: ERC20 token symbol of token being converted
# :type from_token_symbol: str
# :param to_token_symbol: ERC20 token symbol of token to receive
# :type to_token_symbol: str
# :returns: uuid of root task
# :rtype: celery.Task
# """
# raise NotImplementedError('out of service until new DEX migration is done')
# s_check = celery.signature(
# 'cic_eth.admin.ctrl.check_lock',
# [
# [from_token_symbol, to_token_symbol],
# self.chain_spec.asdict(),
# LockEnum.QUEUE,
# from_address,
# ],
# queue=self.queue,
# )
# s_nonce = celery.signature(
# 'cic_eth.eth.nonce.reserve_nonce',
# [
# self.chain_spec.asdict(),
# ],
# queue=self.queue,
# )
# s_tokens = celery.signature(
# 'cic_eth.eth.erc20.resolve_tokens_by_symbol',
# [
# self.chain_spec.asdict(),
# ],
# queue=self.queue,
# )
# s_convert = celery.signature(
# 'cic_eth.eth.bancor.convert_with_default_reserve',
# [
# from_address,
# target_return,
# minimum_return,
# from_address,
# self.chain_spec.asdict(),
# ],
# queue=self.queue,
# )
# s_nonce.link(s_tokens)
# s_check.link(s_nonce)
# if self.callback_param != None:
# s_convert.link(self.callback_success)
# s_tokens.link(s_convert).on_error(self.callback_error)
# else:
# s_tokens.link(s_convert)
#
# t = s_check.apply_async(queue=self.queue)
# return t
def transfer_from(self, from_address, to_address, value, token_symbol, spender_address):
"""Executes a chain of celery tasks that performs a transfer of ERC20 tokens by one address on behalf of another address to a third party.
:param from_address: Ethereum address of sender :param from_address: Ethereum address of sender
:type from_address: str, 0x-hex :type from_address: str, 0x-hex
:param to_address: Ethereum address of receipient :param to_address: Ethereum address of recipient
:type to_address: str, 0x-hex :type to_address: str, 0x-hex
:param target_return: Estimated return from conversion :param value: Estimated return from conversion
:type target_return: int :type value: int
:param minimum_return: The least value of destination token return to allow :param token_symbol: ERC20 token symbol of token to send
:type minimum_return: int :type token_symbol: str
:param from_token_symbol: ERC20 token symbol of token being converted :param spender_address: Ethereum address of recipient
:type from_token_symbol: str :type spender_address: str, 0x-hex
:param to_token_symbol: ERC20 token symbol of token to receive
:type to_token_symbol: str
:returns: uuid of root task :returns: uuid of root task
:rtype: celery.Task :rtype: celery.Task
""" """
raise NotImplementedError('out of service until new DEX migration is done')
s_check = celery.signature( s_check = celery.signature(
'cic_eth.admin.ctrl.check_lock', 'cic_eth.admin.ctrl.check_lock',
[ [
[from_token_symbol, to_token_symbol], [token_symbol],
self.chain_spec.asdict(), self.chain_spec.asdict(),
LockEnum.QUEUE, LockEnum.QUEUE,
from_address, from_address,
@@ -107,102 +234,51 @@ class Api:
'cic_eth.eth.nonce.reserve_nonce', 'cic_eth.eth.nonce.reserve_nonce',
[ [
self.chain_spec.asdict(), self.chain_spec.asdict(),
from_address,
], ],
queue=self.queue, queue=self.queue,
) )
s_tokens = celery.signature( s_tokens = celery.signature(
'cic_eth.eth.erc20.resolve_tokens_by_symbol', 'cic_eth.eth.erc20.resolve_tokens_by_symbol',
[ [
self.chain_str, self.chain_spec.asdict(),
], ],
queue=self.queue, queue=self.queue,
) )
s_convert = celery.signature( s_allow = celery.signature(
'cic_eth.eth.bancor.convert_with_default_reserve', 'cic_eth.eth.erc20.check_allowance',
[
from_address,
value,
self.chain_spec.asdict(),
spender_address,
],
queue=self.queue,
)
s_transfer = celery.signature(
'cic_eth.eth.erc20.transfer_from',
[ [
from_address, from_address,
target_return,
minimum_return,
to_address, to_address,
value,
self.chain_spec.asdict(), self.chain_spec.asdict(),
spender_address,
], ],
queue=self.queue, queue=self.queue,
) )
s_tokens.link(s_allow)
s_nonce.link(s_tokens) s_nonce.link(s_tokens)
s_check.link(s_nonce) s_check.link(s_nonce)
if self.callback_param != None: if self.callback_param != None:
s_convert.link(self.callback_success) s_transfer.link(self.callback_success)
s_tokens.link(s_convert).on_error(self.callback_error) s_allow.link(s_transfer).on_error(self.callback_error)
else: else:
s_tokens.link(s_convert) s_allow.link(s_transfer)
t = s_check.apply_async(queue=self.queue) t = s_check.apply_async(queue=self.queue)
return t return t
def convert(self, from_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
"""Executes a chain of celery tasks that performs conversion between two ERC20 tokens.
:param from_address: Ethereum address of sender
:type from_address: str, 0x-hex
:param target_return: Estimated return from conversion
:type target_return: int
:param minimum_return: The least value of destination token return to allow
:type minimum_return: int
:param from_token_symbol: ERC20 token symbol of token being converted
:type from_token_symbol: str
:param to_token_symbol: ERC20 token symbol of token to receive
:type to_token_symbol: str
:returns: uuid of root task
:rtype: celery.Task
"""
raise NotImplementedError('out of service until new DEX migration is done')
s_check = celery.signature(
'cic_eth.admin.ctrl.check_lock',
[
[from_token_symbol, to_token_symbol],
self.chain_spec.asdict(),
LockEnum.QUEUE,
from_address,
],
queue=self.queue,
)
s_nonce = celery.signature(
'cic_eth.eth.nonce.reserve_nonce',
[
self.chain_spec.asdict(),
],
queue=self.queue,
)
s_tokens = celery.signature(
'cic_eth.eth.erc20.resolve_tokens_by_symbol',
[
self.chain_spec.asdict(),
],
queue=self.queue,
)
s_convert = celery.signature(
'cic_eth.eth.bancor.convert_with_default_reserve',
[
from_address,
target_return,
minimum_return,
from_address,
self.chain_spec.asdict(),
],
queue=self.queue,
)
s_nonce.link(s_tokens)
s_check.link(s_nonce)
if self.callback_param != None:
s_convert.link(self.callback_success)
s_tokens.link(s_convert).on_error(self.callback_error)
else:
s_tokens.link(s_convert)
t = s_check.apply_async(queue=self.queue)
return t
def transfer(self, from_address, to_address, value, token_symbol): def transfer(self, from_address, to_address, value, token_symbol):
"""Executes a chain of celery tasks that performs a transfer of ERC20 tokens from one address to another. """Executes a chain of celery tasks that performs a transfer of ERC20 tokens from one address to another.

View File

@@ -1,8 +0,0 @@
import math
def num_serialize(n):
if n == 0:
return b'\x00'
binlog = math.log2(n)
bytelength = int(binlog / 8 + 1)
return n.to_bytes(bytelength, 'big')

View File

@@ -80,3 +80,8 @@ class SignerError(SeppukuError):
class RoleAgencyError(SeppukuError): class RoleAgencyError(SeppukuError):
"""Exception raise when a role cannot perform its function. This is a critical exception """Exception raise when a role cannot perform its function. This is a critical exception
""" """
class YouAreBrokeError(Exception):
"""Exception raised when a value transfer is attempted without access to sufficient funds
"""

View File

@@ -24,6 +24,7 @@ from cic_eth.error import (
TokenCountError, TokenCountError,
PermanentTxError, PermanentTxError,
OutOfGasError, OutOfGasError,
YouAreBrokeError,
) )
from cic_eth.queue.tx import register_tx from cic_eth.queue.tx import register_tx
from cic_eth.eth.gas import ( from cic_eth.eth.gas import (
@@ -71,6 +72,117 @@ def balance(tokens, holder_address, chain_spec_dict):
return tokens return tokens
@celery_app.task(bind=True)
def check_allowance(self, tokens, holder_address, value, chain_spec_dict, spender_address):
"""Best-effort verification that the allowance for a transfer from spend is sufficient.
:raises YouAreBrokeError: If allowance is insufficient
:param tokens: Token addresses
:type tokens: list of str, 0x-hex
:param holder_address: Token holder address
:type holder_address: str, 0x-hex
:param value: Amount of token, in 'wei'
:type value: int
:param chain_str: Chain spec string representation
:type chain_str: str
:param spender_address: Address of account spending on behalf of holder
:type spender_address: str, 0x-hex
:return: Token list as passed to task
:rtype: dict
"""
logg.debug('tokens {}'.format(tokens))
if len(tokens) != 1:
raise TokenCountError
t = tokens[0]
chain_spec = ChainSpec.from_dict(chain_spec_dict)
rpc = RPCConnection.connect(chain_spec, 'default')
caller_address = ERC20Token.caller_address
c = ERC20(chain_spec)
o = c.allowance(t['address'], holder_address, spender_address, sender_address=caller_address)
r = rpc.do(o)
allowance = c.parse_allowance(r)
if allowance < value:
errstr = 'allowance {} insufficent to transfer {} {} by {} on behalf of {}'.format(allowance, value, t['symbol'], spender_address, holder_address)
logg.error(errstr)
raise YouAreBrokeError(errstr)
return tokens
@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
def transfer_from(self, tokens, holder_address, receiver_address, value, chain_spec_dict, spender_address):
"""Transfer ERC20 tokens between addresses
First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.
:param tokens: Token addresses
:type tokens: list of str, 0x-hex
:param holder_address: Token holder address
:type holder_address: str, 0x-hex
:param receiver_address: Token receiver address
:type receiver_address: str, 0x-hex
:param value: Amount of token, in 'wei'
:type value: int
:param chain_str: Chain spec string representation
:type chain_str: str
:param spender_address: Address of account spending on behalf of holder
:type spender_address: str, 0x-hex
:raises TokenCountError: Either none or more then one tokens have been passed as tokens argument
:return: Transaction hash for tranfer operation
:rtype: str, 0x-hex
"""
# we only allow one token, one transfer
logg.debug('tokens {}'.format(tokens))
if len(tokens) != 1:
raise TokenCountError
t = tokens[0]
chain_spec = ChainSpec.from_dict(chain_spec_dict)
queue = self.request.delivery_info.get('routing_key')
rpc = RPCConnection.connect(chain_spec, 'default')
rpc_signer = RPCConnection.connect(chain_spec, 'signer')
session = self.create_session()
nonce_oracle = CustodialTaskNonceOracle(holder_address, self.request.root_id, session=session)
gas_oracle = self.create_gas_oracle(rpc, MaxGasOracle.gas)
c = ERC20(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle)
try:
(tx_hash_hex, tx_signed_raw_hex) = c.transfer_from(t['address'], spender_address, holder_address, receiver_address, value, tx_format=TxFormat.RLP_SIGNED)
except FileNotFoundError as e:
raise SignerError(e)
except ConnectionError as e:
raise SignerError(e)
rpc_signer.disconnect()
rpc.disconnect()
cache_task = 'cic_eth.eth.erc20.cache_transfer_from_data'
register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=cache_task, session=session)
session.commit()
session.close()
gas_pair = gas_oracle.get_gas(tx_signed_raw_hex)
gas_budget = gas_pair[0] * gas_pair[1]
logg.debug('transfer tx {} {} {}'.format(tx_hash_hex, queue, gas_budget))
s = create_check_gas_task(
[tx_signed_raw_hex],
chain_spec,
holder_address,
gas_budget,
[tx_hash_hex],
queue,
)
s.apply_async()
return tx_hash_hex
@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask) @celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
def transfer(self, tokens, holder_address, receiver_address, value, chain_spec_dict): def transfer(self, tokens, holder_address, receiver_address, value, chain_spec_dict):
"""Transfer ERC20 tokens between addresses """Transfer ERC20 tokens between addresses
@@ -232,6 +344,7 @@ def resolve_tokens_by_symbol(self, token_symbols, chain_spec_dict):
logg.debug('token {}'.format(token_address)) logg.debug('token {}'.format(token_address))
tokens.append({ tokens.append({
'address': token_address, 'address': token_address,
'symbol': token_symbol,
'converters': [], 'converters': [],
}) })
rpc.disconnect() rpc.disconnect()
@@ -279,6 +392,48 @@ def cache_transfer_data(
return (tx_hash_hex, cache_id) return (tx_hash_hex, cache_id)
@celery_app.task(base=CriticalSQLAlchemyTask)
def cache_transfer_from_data(
tx_hash_hex,
tx_signed_raw_hex,
chain_spec_dict,
):
"""Helper function for otx_cache_transfer_from
:param tx_hash_hex: Transaction hash
:type tx_hash_hex: str, 0x-hex
:param tx: Signed raw transaction
:type tx: str, 0x-hex
:returns: Transaction hash and id of cache element in storage backend, respectively
:rtype: tuple
"""
chain_spec = ChainSpec.from_dict(chain_spec_dict)
tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
tx = unpack(tx_signed_raw_bytes, chain_spec)
tx_data = ERC20.parse_transfer_from_request(tx['data'])
spender_address = tx_data[0]
recipient_address = tx_data[1]
token_value = tx_data[2]
session = SessionBase.create_session()
tx_cache = TxCache(
tx_hash_hex,
tx['from'],
recipient_address,
tx['to'],
tx['to'],
token_value,
token_value,
session=session,
)
session.add(tx_cache)
session.commit()
cache_id = tx_cache.id
session.close()
return (tx_hash_hex, cache_id)
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def cache_approve_data( def cache_approve_data(
tx_hash_hex, tx_hash_hex,

View File

@@ -57,10 +57,12 @@ celery_app = celery.current_app
logg = logging.getLogger() logg = logging.getLogger()
MAXIMUM_FEE_UNITS = 8000000
class MaxGasOracle: class MaxGasOracle:
def gas(code=None): def gas(code=None):
return 8000000 return MAXIMUM_FEE_UNITS
def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None): def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None):
@@ -150,7 +152,7 @@ def cache_gas_data(
@celery_app.task(bind=True, throws=(OutOfGasError), base=CriticalSQLAlchemyAndWeb3Task) @celery_app.task(bind=True, throws=(OutOfGasError), base=CriticalSQLAlchemyAndWeb3Task)
def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_required=None): def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_required=MAXIMUM_FEE_UNITS):
"""Check the gas level of the sender address of a transaction. """Check the gas level of the sender address of a transaction.
If the account balance is not sufficient for the required gas, gas refill is requested and OutOfGasError raiser. If the account balance is not sufficient for the required gas, gas refill is requested and OutOfGasError raiser.
@@ -170,24 +172,30 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
:return: Signed raw transaction data list :return: Signed raw transaction data list
:rtype: param txs, unchanged :rtype: param txs, unchanged
""" """
if len(txs) == 0: chain_spec = ChainSpec.from_dict(chain_spec_dict)
for i in range(len(tx_hashes)): logg.debug('txs {} tx_hashes {}'.format(txs, tx_hashes))
o = get_tx(tx_hashes[i])
txs.append(o['signed_tx']) addresspass = None
if address == None: if len(txs) == 0:
address = o['address'] addresspass = []
for i in range(len(tx_hashes)):
o = get_tx(chain_spec_dict, tx_hashes[i])
txs.append(o['signed_tx'])
logg.debug('sender {}'.format(o))
tx = unpack(bytes.fromhex(strip_0x(o['signed_tx'])), chain_spec)
if address == None:
address = tx['from']
elif address != tx['from']:
raise ValueError('txs passed to check gas must all have same sender; had {} got {}'.format(address, tx['from']))
addresspass.append(address)
#if not web3.Web3.isChecksumAddress(address):
if not is_checksum_address(address): if not is_checksum_address(address):
raise ValueError('invalid address {}'.format(address)) raise ValueError('invalid address {}'.format(address))
chain_spec = ChainSpec.from_dict(chain_spec_dict)
queue = self.request.delivery_info.get('routing_key') queue = self.request.delivery_info.get('routing_key')
conn = RPCConnection.connect(chain_spec) conn = RPCConnection.connect(chain_spec)
# TODO: it should not be necessary to pass address explicitly, if not passed should be derived from the tx
gas_balance = 0 gas_balance = 0
try: try:
o = balance(address) o = balance(address)
@@ -198,6 +206,9 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
conn.disconnect() conn.disconnect()
raise EthError('gas_balance call for {}: {}'.format(address, e)) raise EthError('gas_balance call for {}: {}'.format(address, e))
if gas_required == None:
gas_required = MAXIMUM_FEE_UNITS
logg.debug('address {} has gas {} needs {}'.format(address, gas_balance, gas_required)) logg.debug('address {} has gas {} needs {}'.format(address, gas_balance, gas_required))
session = SessionBase.create_session() session = SessionBase.create_session()
gas_provider = AccountRole.get_address('GAS_GIFTER', session=session) gas_provider = AccountRole.get_address('GAS_GIFTER', session=session)
@@ -268,7 +279,8 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
queue=queue, queue=queue,
) )
ready_tasks.append(s) ready_tasks.append(s)
celery.group(ready_tasks)() t = celery.group(ready_tasks)()
logg.debug('group {}'.format(t))
return txs return txs

View File

@@ -21,6 +21,7 @@ from chainqueue.db.models.tx import Otx
from chainqueue.db.models.tx import TxCache from chainqueue.db.models.tx import TxCache
from chainqueue.db.enum import StatusBits from chainqueue.db.enum import StatusBits
from chainqueue.error import NotLocalTxError from chainqueue.error import NotLocalTxError
from potaahto.symbols import snake_and_camel
# local imports # local imports
from cic_eth.db import SessionBase from cic_eth.db import SessionBase
@@ -58,6 +59,9 @@ def hashes_to_txs(self, tx_hashes):
if len(tx_hashes) == 0: if len(tx_hashes) == 0:
raise ValueError('no transaction to send') raise ValueError('no transaction to send')
for i in range(len(tx_hashes)):
tx_hashes[i] = strip_0x(tx_hashes[i])
queue = self.request.delivery_info['routing_key'] queue = self.request.delivery_info['routing_key']
session = SessionBase.create_session() session = SessionBase.create_session()
@@ -148,7 +152,7 @@ def send(self, txs, chain_spec_dict):
@celery_app.task(bind=True, throws=(NotFoundEthException,), base=CriticalWeb3Task) @celery_app.task(bind=True, throws=(NotFoundEthException,), base=CriticalWeb3Task)
def sync_tx(self, tx_hash_hex, chain_spec_dict): def sync_tx(self, tx_hash_hex, chain_spec_dict):
"""Force update of network status of a simgle transaction """Force update of network status of a single transaction
:param tx_hash_hex: Transaction hash :param tx_hash_hex: Transaction hash
:type tx_hash_hex: str, 0x-hex :type tx_hash_hex: str, 0x-hex
@@ -173,12 +177,14 @@ def sync_tx(self, tx_hash_hex, chain_spec_dict):
# TODO: apply receipt in tx object to validate and normalize input # TODO: apply receipt in tx object to validate and normalize input
if rcpt != None: if rcpt != None:
rcpt = snake_and_camel(rcpt)
success = rcpt['status'] == 1 success = rcpt['status'] == 1
logg.debug('sync tx {} mined block {} success {}'.format(tx_hash_hex, rcpt['blockNumber'], success)) logg.debug('sync tx {} mined block {} tx index {} success {}'.format(tx_hash_hex, rcpt['blockNumber'], rcpt['transactionIndex'], success))
s = celery.signature( s = celery.signature(
'cic_eth.queue.state.set_final', 'cic_eth.queue.state.set_final',
[ [
chain_spec_dict,
tx_hash_hex, tx_hash_hex,
rcpt['blockNumber'], rcpt['blockNumber'],
rcpt['transactionIndex'], rcpt['transactionIndex'],
@@ -186,12 +192,14 @@ def sync_tx(self, tx_hash_hex, chain_spec_dict):
], ],
queue=queue, queue=queue,
) )
# TODO: it's not entirely clear how we can reliable determine that its in mempool without explicitly checking
else: else:
logg.debug('sync tx {} mempool'.format(tx_hash_hex)) logg.debug('sync tx {} mempool'.format(tx_hash_hex))
s = celery.signature( s = celery.signature(
'cic_eth.queue.state.set_sent', 'cic_eth.queue.state.set_sent',
[ [
chain_spec_dict,
tx_hash_hex, tx_hash_hex,
], ],
queue=queue, queue=queue,

View File

@@ -19,7 +19,7 @@ from cic_eth_registry import CICRegistry
from cic_eth_registry.erc20 import ERC20Token from cic_eth_registry.erc20 import ERC20Token
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusEnum from chainqueue.db.enum import StatusEnum
from chainqueue.query import get_tx_cache from chainqueue.sql.query import get_tx_cache
from eth_erc20 import ERC20 from eth_erc20 import ERC20
# local imports # local imports

View File

@@ -22,7 +22,6 @@ def init_celery_tasks(
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def celery_includes(): def celery_includes():
return [ return [
# 'cic_eth.eth.bancor',
'cic_eth.eth.erc20', 'cic_eth.eth.erc20',
'cic_eth.eth.tx', 'cic_eth.eth.tx',
'cic_eth.ext.tx', 'cic_eth.ext.tx',
@@ -47,8 +46,8 @@ def celery_config():
bq = tempfile.mkdtemp() bq = tempfile.mkdtemp()
bp = tempfile.mkdtemp() bp = tempfile.mkdtemp()
rq = tempfile.mkdtemp() rq = tempfile.mkdtemp()
logg.debug('celery broker queue {} processed {}'.format(bq, bp)) logg.debug('celery broker session queue {} processed {}'.format(bq, bp))
logg.debug('celery backend store {}'.format(rq)) logg.debug('celery backend session store {}'.format(rq))
yield { yield {
'broker_url': 'filesystem://', 'broker_url': 'filesystem://',
'broker_transport_options': { 'broker_transport_options': {
@@ -58,12 +57,11 @@ def celery_config():
}, },
'result_backend': 'file://{}'.format(rq), 'result_backend': 'file://{}'.format(rq),
} }
logg.debug('cleaning up celery filesystem backend files {} {} {}'.format(bq, bp, rq)) logg.debug('cleaning up celery session filesystem backend files {} {} {}'.format(bq, bp, rq))
shutil.rmtree(bq) shutil.rmtree(bq)
shutil.rmtree(bp) shutil.rmtree(bp)
shutil.rmtree(rq) shutil.rmtree(rq)
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def celery_worker_parameters(): def celery_worker_parameters():
return { return {

View File

@@ -2,13 +2,13 @@
import os import os
import logging import logging
# third-party imports # external imports
import pytest import pytest
import confini import confini
script_dir = os.path.dirname(os.path.realpath(__file__)) script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir) root_dir = os.path.dirname(os.path.dirname(script_dir))
logg = logging.getLogger(__file__) logg = logging.getLogger(__name__)
@pytest.fixture(scope='session') @pytest.fixture(scope='session')

View File

@@ -0,0 +1,77 @@
# standard imports
import os
# external imports
import pytest
from chainlib.eth.contract import (
ABIContractEncoder,
ABIContractType,
)
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.block import (
block_latest,
block_by_number,
Block,
)
from chainlib.eth.tx import (
receipt,
TxFactory,
TxFormat,
unpack,
Tx,
)
from hexathon import strip_0x
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
@pytest.fixture(scope='function')
def bogus_tx_block(
default_chain_spec,
eth_rpc,
eth_signer,
contract_roles,
):
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
gas_oracle = OverrideGasOracle(limit=2000000, conn=eth_rpc)
f = open(os.path.join(script_dir, 'testdata', 'Bogus.bin'), 'r')
bytecode = f.read()
f.close()
c = TxFactory(default_chain_spec, signer=eth_signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle)
tx = c.template(contract_roles['CONTRACT_DEPLOYER'], None, use_nonce=True)
tx = c.set_code(tx, bytecode)
(tx_hash_hex, o) = c.build(tx)
r = eth_rpc.do(o)
o = receipt(tx_hash_hex)
r = eth_rpc.do(o)
contract_address = r['contract_address']
enc = ABIContractEncoder()
enc.method('poke')
data = enc.get()
tx = c.template(contract_roles['CONTRACT_DEPLOYER'], contract_address, use_nonce=True)
tx = c.set_code(tx, data)
(tx_hash_hex, o) = c.finalize(tx, TxFormat.JSONRPC)
r = eth_rpc.do(o)
tx_signed_raw_hex = strip_0x(o['params'][0])
o = block_latest()
r = eth_rpc.do(o)
o = block_by_number(r, include_tx=False)
r = eth_rpc.do(o)
block = Block(r)
block.txs = [tx_hash_hex]
tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
tx_src = unpack(tx_signed_raw_bytes, default_chain_spec)
tx = Tx(tx_src, block=block)
return (block, tx)

View File

@@ -37,7 +37,8 @@ def init_database(
database_engine, database_engine,
): ):
rootdir = os.path.dirname(os.path.dirname(__file__)) script_dir = os.path.dirname(os.path.realpath(__file__))
rootdir = os.path.dirname(os.path.dirname(script_dir))
dbdir = os.path.join(rootdir, 'cic_eth', 'db') dbdir = os.path.join(rootdir, 'cic_eth', 'db')
migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE')) migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrationsdir): if not os.path.isdir(migrationsdir):

View File

@@ -0,0 +1 @@
60806040526000805534801561001457600080fd5b50610181806100246000396000f3fe608060405234801561001057600080fd5b5060043610610053576000357c0100000000000000000000000000000000000000000000000000000000900480630dbe671f146100585780631817835814610076575b600080fd5b610060610080565b60405161006d91906100ae565b60405180910390f35b61007e610086565b005b60005481565b600080815480929190610098906100d3565b9190505550565b6100a8816100c9565b82525050565b60006020820190506100c3600083018461009f565b92915050565b6000819050919050565b60006100de826100c9565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8214156101115761011061011c565b5b600182019050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fdfea264697066735822122034ad8e91e864f030d47f5b93e281869206c1b203c36dc79a209ac9c9c16e577564736f6c63430008040033

View File

@@ -0,0 +1,10 @@
pragma solidity ^0.8.0;
contract Bogus {
uint256 public a = 0;
function poke() public {
a++;
}
}

View File

@@ -5,7 +5,7 @@ import datetime
import celery import celery
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.tx import unpack from chainlib.eth.tx import unpack
import chainqueue.query import chainqueue.sql.query
from chainqueue.db.enum import ( from chainqueue.db.enum import (
StatusEnum, StatusEnum,
is_alive, is_alive,
@@ -28,7 +28,7 @@ celery_app = celery.current_app
def get_tx_cache(chain_spec_dict, tx_hash): def get_tx_cache(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.query.get_tx_cache(chain_spec, tx_hash, session=session) r = chainqueue.sql.query.get_tx_cache(chain_spec, tx_hash, session=session)
session.close() session.close()
return r return r
@@ -37,7 +37,7 @@ def get_tx_cache(chain_spec_dict, tx_hash):
def get_tx(chain_spec_dict, tx_hash): def get_tx(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.query.get_tx(chain_spec, tx_hash) r = chainqueue.sql.query.get_tx(chain_spec, tx_hash, session=session)
session.close() session.close()
return r return r
@@ -46,7 +46,7 @@ def get_tx(chain_spec_dict, tx_hash):
def get_account_tx(chain_spec_dict, address, as_sender=True, as_recipient=True, counterpart=None): def get_account_tx(chain_spec_dict, address, as_sender=True, as_recipient=True, counterpart=None):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.query.get_account_tx(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=session) r = chainqueue.sql.query.get_account_tx(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=session)
session.close() session.close()
return r return r
@@ -55,17 +55,17 @@ def get_account_tx(chain_spec_dict, address, as_sender=True, as_recipient=True,
def get_upcoming_tx_nolock(chain_spec_dict, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None): def get_upcoming_tx_nolock(chain_spec_dict, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack) r = chainqueue.sql.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack)
session.close() session.close()
return r return r
def get_status_tx(chain_spec, status, not_status=None, before=None, exact=False, limit=0, session=None): def get_status_tx(chain_spec, status, not_status=None, before=None, exact=False, limit=0, session=None):
return chainqueue.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack) return chainqueue.sql.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack)
def get_paused_tx(chain_spec, status=None, sender=None, session=None, decoder=None): def get_paused_tx(chain_spec, status=None, sender=None, session=None, decoder=None):
return chainqueue.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack) return chainqueue.sql.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack)
def get_nonce_tx(chain_spec, nonce, sender): def get_nonce_tx(chain_spec, nonce, sender):

View File

@@ -1,6 +1,6 @@
# external imports # external imports
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
import chainqueue.state import chainqueue.sql.state
# local imports # local imports
import celery import celery
@@ -14,7 +14,7 @@ celery_app = celery.current_app
def set_sent(chain_spec_dict, tx_hash, fail=False): def set_sent(chain_spec_dict, tx_hash, fail=False):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.set_sent(chain_spec, tx_hash, fail, session=session) r = chainqueue.sql.state.set_sent(chain_spec, tx_hash, fail, session=session)
session.close() session.close()
return r return r
@@ -23,7 +23,7 @@ def set_sent(chain_spec_dict, tx_hash, fail=False):
def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False): def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.set_final(chain_spec, tx_hash, block=block, tx_index=tx_index, fail=fail, session=session) r = chainqueue.sql.state.set_final(chain_spec, tx_hash, block=block, tx_index=tx_index, fail=fail, session=session)
session.close() session.close()
return r return r
@@ -32,7 +32,7 @@ def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False):
def set_cancel(chain_spec_dict, tx_hash, manual=False): def set_cancel(chain_spec_dict, tx_hash, manual=False):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.set_cancel(chain_spec, tx_hash, manual, session=session) r = chainqueue.sql.state.set_cancel(chain_spec, tx_hash, manual, session=session)
session.close() session.close()
return r return r
@@ -41,7 +41,7 @@ def set_cancel(chain_spec_dict, tx_hash, manual=False):
def set_rejected(chain_spec_dict, tx_hash): def set_rejected(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.set_rejected(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_rejected(chain_spec, tx_hash, session=session)
session.close() session.close()
return r return r
@@ -50,7 +50,7 @@ def set_rejected(chain_spec_dict, tx_hash):
def set_fubar(chain_spec_dict, tx_hash): def set_fubar(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.set_fubar(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_fubar(chain_spec, tx_hash, session=session)
session.close() session.close()
return r return r
@@ -59,7 +59,7 @@ def set_fubar(chain_spec_dict, tx_hash):
def set_manual(chain_spec_dict, tx_hash): def set_manual(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.set_manual(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_manual(chain_spec, tx_hash, session=session)
session.close() session.close()
return r return r
@@ -68,7 +68,7 @@ def set_manual(chain_spec_dict, tx_hash):
def set_ready(chain_spec_dict, tx_hash): def set_ready(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.set_ready(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_ready(chain_spec, tx_hash, session=session)
session.close() session.close()
return r return r
@@ -77,7 +77,7 @@ def set_ready(chain_spec_dict, tx_hash):
def set_reserved(chain_spec_dict, tx_hash): def set_reserved(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.set_reserved(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_reserved(chain_spec, tx_hash, session=session)
session.close() session.close()
return r return r
@@ -86,7 +86,7 @@ def set_reserved(chain_spec_dict, tx_hash):
def set_waitforgas(chain_spec_dict, tx_hash): def set_waitforgas(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.set_waitforgas(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_waitforgas(chain_spec, tx_hash, session=session)
session.close() session.close()
return r return r
@@ -95,7 +95,7 @@ def set_waitforgas(chain_spec_dict, tx_hash):
def get_state_log(chain_spec_dict, tx_hash): def get_state_log(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.get_state_log(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.get_state_log(chain_spec, tx_hash, session=session)
session.close() session.close()
return r return r
@@ -104,6 +104,6 @@ def get_state_log(chain_spec_dict, tx_hash):
def obsolete(chain_spec_dict, tx_hash, final): def obsolete(chain_spec_dict, tx_hash, final):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.state.obsolete_by_cache(chain_spec, tx_hash, final, session=session) r = chainqueue.sql.state.obsolete_by_cache(chain_spec, tx_hash, final, session=session)
session.close() session.close()
return r return r

View File

@@ -15,14 +15,14 @@ from sqlalchemy import tuple_
from sqlalchemy import func from sqlalchemy import func
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.tx import unpack from chainlib.eth.tx import unpack
import chainqueue.state import chainqueue.sql.state
from chainqueue.db.enum import ( from chainqueue.db.enum import (
StatusEnum, StatusEnum,
StatusBits, StatusBits,
is_alive, is_alive,
dead, dead,
) )
from chainqueue.tx import create from chainqueue.sql.tx import create
from chainqueue.error import NotLocalTxError from chainqueue.error import NotLocalTxError
from chainqueue.db.enum import status_str from chainqueue.db.enum import status_str

View File

@@ -5,29 +5,30 @@ import logging
from cic_eth_registry import CICRegistry from cic_eth_registry import CICRegistry
from cic_eth_registry.lookup.declarator import AddressDeclaratorLookup from cic_eth_registry.lookup.declarator import AddressDeclaratorLookup
from cic_eth_registry.lookup.tokenindex import TokenIndexLookup from cic_eth_registry.lookup.tokenindex import TokenIndexLookup
from chainlib.eth.constant import ZERO_ADDRESS
logg = logging.getLogger() logg = logging.getLogger()
def connect_token_registry(rpc, chain_spec): def connect_token_registry(rpc, chain_spec, sender_address=ZERO_ADDRESS):
registry = CICRegistry(chain_spec, rpc) registry = CICRegistry(chain_spec, rpc)
token_registry_address = registry.by_name('TokenRegistry') token_registry_address = registry.by_name('TokenRegistry', sender_address=sender_address)
logg.debug('using token registry address {}'.format(token_registry_address)) logg.debug('using token registry address {}'.format(token_registry_address))
lookup = TokenIndexLookup(chain_spec, token_registry_address) lookup = TokenIndexLookup(chain_spec, token_registry_address)
CICRegistry.add_lookup(lookup) CICRegistry.add_lookup(lookup)
def connect_declarator(rpc, chain_spec, trusted_addresses): def connect_declarator(rpc, chain_spec, trusted_addresses, sender_address=ZERO_ADDRESS):
registry = CICRegistry(chain_spec, rpc) registry = CICRegistry(chain_spec, rpc)
declarator_address = registry.by_name('AddressDeclarator') declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address)
logg.debug('using declarator address {}'.format(declarator_address)) logg.debug('using declarator address {}'.format(declarator_address))
lookup = AddressDeclaratorLookup(chain_spec, declarator_address, trusted_addresses) lookup = AddressDeclaratorLookup(chain_spec, declarator_address, trusted_addresses)
CICRegistry.add_lookup(lookup) CICRegistry.add_lookup(lookup)
def connect(rpc, chain_spec, registry_address): def connect(rpc, chain_spec, registry_address, sender_address=ZERO_ADDRESS):
CICRegistry.address = registry_address CICRegistry.address = registry_address
registry = CICRegistry(chain_spec, rpc) registry = CICRegistry(chain_spec, rpc)
registry_address = registry.by_name('ContractRegistry') registry_address = registry.by_name('ContractRegistry', sender_address=sender_address)
return registry return registry

View File

@@ -21,7 +21,7 @@ from chainqueue.db.enum import (
StatusBits, StatusBits,
) )
from chainqueue.error import NotLocalTxError from chainqueue.error import NotLocalTxError
from chainqueue.state import set_reserved from chainqueue.sql.state import set_reserved
# local imports # local imports
import cic_eth import cic_eth

View File

@@ -72,7 +72,9 @@ class CallbackFilter(SyncFilter):
#transfer_data['token_address'] = tx.inputs[0] #transfer_data['token_address'] = tx.inputs[0]
faucet_contract = tx.inputs[0] faucet_contract = tx.inputs[0]
o = Faucet.token(faucet_contract, sender_address=self.caller_address) c = Faucet(self.chain_spec)
o = c.token(faucet_contract, sender_address=self.caller_address)
r = conn.do(o) r = conn.do(o)
transfer_data['token_address'] = add_0x(c.parse_token(r)) transfer_data['token_address'] = add_0x(c.parse_token(r))

View File

@@ -10,14 +10,15 @@ from chainlib.eth.tx import unpack
from chainqueue.db.enum import StatusBits from chainqueue.db.enum import StatusBits
from chainqueue.db.models.tx import TxCache from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
from chainqueue.query import get_paused_tx_cache as get_paused_tx from chainqueue.sql.query import get_paused_tx_cache as get_paused_tx
# local imports # local imports
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
from cic_eth.eth.gas import create_check_gas_task from cic_eth.eth.gas import create_check_gas_task
from .base import SyncFilter from .base import SyncFilter
logg = logging.getLogger().getChild(__name__) #logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger()
class GasFilter(SyncFilter): class GasFilter(SyncFilter):
@@ -27,11 +28,11 @@ class GasFilter(SyncFilter):
self.chain_spec = chain_spec self.chain_spec = chain_spec
def filter(self, conn, block, tx, session): def filter(self, conn, block, tx, db_session):
if tx.value > 0: if tx.value > 0:
tx_hash_hex = add_0x(tx.hash) tx_hash_hex = add_0x(tx.hash)
logg.debug('gas refill tx {}'.format(tx_hash_hex)) logg.debug('gas refill tx {}'.format(tx_hash_hex))
session = SessionBase.bind_session(session) session = SessionBase.bind_session(db_session)
q = session.query(TxCache.recipient) q = session.query(TxCache.recipient)
q = q.join(Otx) q = q.join(Otx)
q = q.filter(Otx.tx_hash==strip_0x(tx_hash_hex)) q = q.filter(Otx.tx_hash==strip_0x(tx_hash_hex))
@@ -56,7 +57,7 @@ class GasFilter(SyncFilter):
tx_hashes_hex=list(txs.keys()), tx_hashes_hex=list(txs.keys()),
queue=self.queue, queue=self.queue,
) )
s.apply_async() return s.apply_async()
def __str__(self): def __str__(self):

View File

@@ -50,7 +50,8 @@ class RegistrationFilter(SyncFilter):
queue=self.queue, queue=self.queue,
) )
s_nonce.link(s_gift) s_nonce.link(s_gift)
s_nonce.apply_async() t = s_nonce.apply_async()
return t
def __str__(self): def __str__(self):

View File

@@ -3,7 +3,7 @@ import logging
# external imports # external imports
import celery import celery
from chainqueue.state import obsolete_by_cache from chainqueue.sql.state import obsolete_by_cache
logg = logging.getLogger() logg = logging.getLogger()

View File

@@ -32,7 +32,7 @@ class TransferAuthFilter(SyncFilter):
self.transfer_request_contract = registry.by_name('TransferAuthorization', sender_address=call_address) self.transfer_request_contract = registry.by_name('TransferAuthorization', sender_address=call_address)
def filter(self, conn, block, tx, session): #rcpt, chain_str, session=None): def filter(self, conn, block, tx, db_session): #rcpt, chain_str, session=None):
if tx.payload == None: if tx.payload == None:
logg.debug('no payload') logg.debug('no payload')
@@ -45,16 +45,17 @@ class TransferAuthFilter(SyncFilter):
return False return False
recipient = tx.inputs[0] recipient = tx.inputs[0]
if recipient != self.transfer_request_contract.address(): #if recipient != self.transfer_request_contract.address():
if recipient != self.transfer_request_contract:
logg.debug('not our transfer auth contract address {}'.format(recipient)) logg.debug('not our transfer auth contract address {}'.format(recipient))
return False return False
r = TransferAuthorization.parse_create_request_request(tx.payload) r = TransferAuthorization.parse_create_request_request(tx.payload)
sender = abi_decode_single(ABIContractType.ADDRESS, r[0]) sender = r[0]
recipient = abi_decode_single(ABIContractType.ADDRESS, r[1]) recipient = r[1]
token = abi_decode_single(ABIContractType.ADDRESS, r[2]) token = r[2]
value = abi_decode_single(ABIContractType.UINT256, r[3]) value = r[3]
token_data = { token_data = {
'address': token, 'address': token,
@@ -64,6 +65,7 @@ class TransferAuthFilter(SyncFilter):
'cic_eth.eth.nonce.reserve_nonce', 'cic_eth.eth.nonce.reserve_nonce',
[ [
[token_data], [token_data],
self.chain_spec.asdict(),
sender, sender,
], ],
queue=self.queue, queue=self.queue,
@@ -80,7 +82,7 @@ class TransferAuthFilter(SyncFilter):
) )
s_nonce.link(s_approve) s_nonce.link(s_approve)
t = s_nonce.apply_async() t = s_nonce.apply_async()
return True return t
def __str__(self): def __str__(self):

View File

@@ -1,136 +0,0 @@
# standard imports
import os
import re
import logging
import argparse
import json
# third-party imports
import web3
import confini
import celery
from json.decoder import JSONDecodeError
from cic_registry.chain import ChainSpec
# local imports
from cic_eth.db import dsn_from_config
from cic_eth.db.models.base import SessionBase
from cic_eth.eth.util import unpack_signed_raw_tx
# Module-level bootstrap for the uwsgi callback server: logging, CLI args,
# configuration, database connection and celery app, executed on import.
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

# Resolve repository-relative paths for the alembic migrations directory.
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_eth', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')

# Default location of the confini configuration directory.
config_dir = os.path.join('/usr/local/etc/cic-eth')

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

# -vv wins over -v; default level remains WARNING.
if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(args.c, args.env_prefix)
config.process()
# NOTE(review): args_override is built but never applied to config
# (no dict_override call follows) — the -i/--chain-spec flag therefore
# appears to have no effect; confirm against confini usage elsewhere.
args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
# Redact credentials before the config is logged below.
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))

# Connect the shared SQLAlchemy session factory and the celery broker.
dsn = dsn_from_config(config)
SessionBase.connect(dsn)
celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))

queue = args.q

# Path prefix matched by process_something().
re_something = r'^/something/?'

chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
def process_something(session, env):
    """Handler stub for the ``/something`` path.

    Returns None when the request path does not match ``re_something``, and
    currently also when it does — the response-building code below is still
    commented out, so no handler ever produces content yet.
    """
    if re.match(re_something, env.get('PATH_INFO')) is None:
        return None
    #if env.get('CONTENT_TYPE') != 'application/json':
    #    raise AttributeError('content type')
    #if env.get('REQUEST_METHOD') != 'POST':
    #    raise AttributeError('method')
    #post_data = json.load(env.get('wsgi.input'))
    #return ('text/plain', 'foo'.encode('utf-8'),)
# uwsgi application
def application(env, start_response):
    """WSGI entry point (uwsgi application).

    Dispatches the request to each handler in turn until one returns a
    ``(mime_type, content)`` tuple. Handler exceptions are mapped to HTTP
    4xx/5xx statuses; an empty result yields 404.

    Fixes over the original: the db session is closed exactly once (the
    error path previously closed it twice), ``r`` is initialized before the
    loop so it can never be referenced unbound, and a stray semicolon was
    removed.
    """
    for k in env.keys():
        logg.debug('env {} {}'.format(k, env[k]))

    headers = []
    content = b''
    err = None
    r = None  # last handler result; stays None if no handler matched

    session = SessionBase.create_session()
    try:
        for handler in [
                process_something,
                ]:
            try:
                r = handler(session, env)
            except AttributeError as e:
                logg.error('handler fail attribute {}'.format(e))
                err = '400 Impertinent request'
                break
            except JSONDecodeError as e:
                logg.error('handler fail json {}'.format(e))
                err = '400 Invalid data format'
                break
            except KeyError as e:
                logg.error('handler fail key {}'.format(e))
                err = '400 Invalid JSON'
                break
            except ValueError as e:
                logg.error('handler fail value {}'.format(e))
                err = '400 Invalid data'
                break
            except RuntimeError as e:
                logg.error('task fail value {}'.format(e))
                err = '500 Task failed, sorry I cannot tell you more'
                break
            if r != None:
                (mime_type, content) = r
                break
    finally:
        # Single close point; covers both the error and the success paths.
        session.close()

    if err != None:
        headers.append(('Content-Type', 'text/plain, charset=UTF-8',))
        start_response(err, headers)
        return [content]

    headers.append(('Content-Length', str(len(content))),)
    headers.append(('Access-Control-Allow-Origin', '*',))
    if len(content) == 0:
        headers.append(('Content-Type', 'text/plain, charset=UTF-8',))
        start_response('404 Looked everywhere, sorry', headers)
    else:
        headers.append(('Content-Type', mime_type,))
        start_response('200 OK', headers)
    return [content]

View File

@@ -22,6 +22,7 @@ from chainlib.eth.connection import (
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
from cic_eth_registry.error import UnknownContractError from cic_eth_registry.error import UnknownContractError
from cic_eth_registry.erc20 import ERC20Token
import liveness.linux import liveness.linux
@@ -193,6 +194,7 @@ def main():
except UnknownContractError as e: except UnknownContractError as e:
logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e)) logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
sys.exit(1) sys.exit(1)
logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS') trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
if trusted_addresses_src == None: if trusted_addresses_src == None:
@@ -207,6 +209,11 @@ def main():
BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL') BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol) BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol)
default_token = ERC20Token(chain_spec, rpc, BaseTask.default_token_address)
default_token.load(rpc)
BaseTask.default_token_decimals = default_token.decimals
BaseTask.default_token_name = default_token.name
BaseTask.run_dir = config.get('CIC_RUN_DIR') BaseTask.run_dir = config.get('CIC_RUN_DIR')
logg.info('default token set to {} {}'.format(BaseTask.default_token_symbol, BaseTask.default_token_address)) logg.info('default token set to {} {}'.format(BaseTask.default_token_symbol, BaseTask.default_token_address))

View File

@@ -15,6 +15,7 @@ import cic_base.config
import cic_base.log import cic_base.log
import cic_base.argparse import cic_base.argparse
import cic_base.rpc import cic_base.rpc
from cic_base.eth.syncer import chain_interface
from cic_eth_registry.error import UnknownContractError from cic_eth_registry.error import UnknownContractError
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS from chainlib.eth.constant import ZERO_ADDRESS
@@ -26,10 +27,8 @@ from hexathon import (
strip_0x, strip_0x,
) )
from chainsyncer.backend.sql import SQLBackend from chainsyncer.backend.sql import SQLBackend
from chainsyncer.driver import ( from chainsyncer.driver.head import HeadSyncer
HeadSyncer, from chainsyncer.driver.history import HistorySyncer
HistorySyncer,
)
from chainsyncer.db.models.base import SessionBase from chainsyncer.db.models.base import SessionBase
# local imports # local imports
@@ -80,6 +79,7 @@ chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER')) cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
def main(): def main():
# connect to celery # connect to celery
celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL')) celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
@@ -121,11 +121,11 @@ def main():
for syncer_backend in syncer_backends: for syncer_backend in syncer_backends:
try: try:
syncers.append(HistorySyncer(syncer_backend)) syncers.append(HistorySyncer(syncer_backend, chain_interface))
logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend)) logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend))
except AttributeError: except AttributeError:
logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend)) logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend))
syncers.append(HeadSyncer(syncer_backend)) syncers.append(HeadSyncer(syncer_backend, chain_interface))
connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS')) connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))

View File

@@ -67,6 +67,8 @@ def main():
token_info = t.get() token_info = t.get()
print('Default token symbol: {}'.format(token_info['symbol'])) print('Default token symbol: {}'.format(token_info['symbol']))
print('Default token address: {}'.format(token_info['address'])) print('Default token address: {}'.format(token_info['address']))
logg.debug('Default token name: {}'.format(token_info['name']))
logg.debug('Default token decimals: {}'.format(token_info['decimals']))
if __name__ == '__main__': if __name__ == '__main__':

View File

@@ -20,7 +20,11 @@ def init_chain_stat(rpc, block_start=0):
if block_start == 0: if block_start == 0:
o = block_latest() o = block_latest()
r = rpc.do(o) r = rpc.do(o)
block_start = int(r, 16) try:
block_start = int(r, 16)
except TypeError:
block_start = int(r)
logg.debug('blockstart {}'.format(block_start))
for i in range(BLOCK_SAMPLES): for i in range(BLOCK_SAMPLES):
o = block_by_number(block_start-10+i) o = block_by_number(block_start-10+i)

View File

@@ -20,7 +20,8 @@ import liveness.linux
from cic_eth.error import SeppukuError from cic_eth.error import SeppukuError
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
logg = logging.getLogger().getChild(__name__) #logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger()
celery_app = celery.current_app celery_app = celery.current_app
@@ -33,6 +34,8 @@ class BaseTask(celery.Task):
create_gas_oracle = RPCGasOracle create_gas_oracle = RPCGasOracle
default_token_address = None default_token_address = None
default_token_symbol = None default_token_symbol = None
default_token_name = None
default_token_decimals = None
run_dir = '/run' run_dir = '/run'
def create_session(self): def create_session(self):
@@ -116,12 +119,13 @@ def registry():
return CICRegistry.address return CICRegistry.address
@celery_app.task() @celery_app.task(bind=True, base=BaseTask)
def registry_address_lookup(chain_spec_dict, address, connection_tag='default'): def registry_address_lookup(self, chain_spec_dict, address, connection_tag='default'):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
conn = RPCConnection.connect(chain_spec, tag=connection_tag) conn = RPCConnection.connect(chain_spec, tag=connection_tag)
registry = CICRegistry(chain_spec, conn) registry = CICRegistry(chain_spec, conn)
return registry.by_address(address) r = registry.by_address(address, sender_address=self.call_address)
return r
@celery_app.task(throws=(UnknownContractError,)) @celery_app.task(throws=(UnknownContractError,))
@@ -129,7 +133,7 @@ def registry_name_lookup(chain_spec_dict, name, connection_tag='default'):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
conn = RPCConnection.connect(chain_spec, tag=connection_tag) conn = RPCConnection.connect(chain_spec, tag=connection_tag)
registry = CICRegistry(chain_spec, conn) registry = CICRegistry(chain_spec, conn)
return registry.by_name(name) return registry.by_name(name, sender_address=self.call_address)
@celery_app.task() @celery_app.task()

View File

@@ -9,8 +9,8 @@ import semver
version = ( version = (
0, 0,
11, 11,
0, 1,
'beta.13', 'alpha.3',
) )
version_object = semver.VersionInfo( version_object = semver.VersionInfo(

View File

@@ -1,48 +1,62 @@
# FROM grassrootseconomics:cic FROM python:3.8.6-slim-buster as compile
#FROM python:3.8.6-alpine
FROM python:3.8.6-slim-buster
#COPY --from=0 /usr/local/share/cic/solidity/ /usr/local/share/cic/solidity/
WORKDIR /usr/src/cic-eth WORKDIR /usr/src/cic-eth
ARG pip_extra_index_url_flag='--index https://pypi.org/simple --extra-index-url https://pip.grassrootseconomics.net:8433'
ARG root_requirement_file='requirements.txt'
#RUN apk update && \
# apk add gcc musl-dev gnupg libpq
#RUN apk add postgresql-dev
#RUN apk add linux-headers
#RUN apk add libffi-dev
RUN apt-get update && \ RUN apt-get update && \
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git
# Copy shared requirements from top of mono-repo #RUN python -m venv venv && . venv/bin/activate
RUN echo "copying root req file: ${root_requirement_file}"
#COPY $root_requirement_file .
#RUN pip install -r $root_requirement_file $pip_extra_index_url_flag
RUN /usr/local/bin/python -m pip install --upgrade pip
#RUN git clone https://gitlab.com/grassrootseconomics/cic-base.git && \
# cd cic-base && \
# git checkout 7ae1f02efc206b13a65873567b0f6d1c3b7f9bc0 && \
# python merge_requirements.py | tee merged_requirements.txt
#RUN cd cic-base && \
# pip install $pip_extra_index_url_flag -r ./merged_requirements.txt
RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
COPY cic-eth/scripts/ scripts/ ARG pip_extra_index_url_flag='--index https://pypi.org/simple --extra-index-url https://pip.grassrootseconomics.net:8433'
COPY cic-eth/setup.cfg cic-eth/setup.py ./ RUN /usr/local/bin/python -m pip install --upgrade pip
COPY cic-eth/cic_eth/ cic_eth/ RUN pip install semver
# Copy app specific requirements
COPY cic-eth/requirements.txt . # TODO use a packaging style that lets us copy requirments only ie. pip-tools
COPY cic-eth/test_requirements.txt . COPY cic-eth/ .
RUN pip install $pip_extra_index_url_flag . RUN pip install $pip_extra_index_url_flag .
# --- TEST IMAGE ---
FROM python:3.8.6-slim-buster as test
RUN apt-get update && \
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git
WORKDIR /usr/src/cic-eth
RUN /usr/local/bin/python -m pip install --upgrade pip
COPY --from=compile /usr/local/bin/ /usr/local/bin/
COPY --from=compile /usr/local/lib/python3.8/site-packages/ \
/usr/local/lib/python3.8/site-packages/
# TODO we could use venv inside container to isolate the system and app deps further
# COPY --from=compile /usr/src/cic-eth/ .
# RUN . venv/bin/activate
COPY cic-eth/test_requirements.txt .
RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
COPY cic-eth .
ENV PYTHONPATH .
ENTRYPOINT ["pytest"]
# --- RUNTIME ---
FROM python:3.8.6-slim-buster as runtime
RUN apt-get update && \
apt install -y gnupg libpq-dev procps
WORKDIR /usr/src/cic-eth
COPY --from=compile /usr/local/bin/ /usr/local/bin/
COPY --from=compile /usr/local/lib/python3.8/site-packages/ \
/usr/local/lib/python3.8/site-packages/
COPY cic-eth/docker/* ./ COPY cic-eth/docker/* ./
RUN chmod 755 *.sh RUN chmod 755 *.sh
COPY cic-eth/tests/ tests/
COPY cic-eth/scripts/ scripts/
# # ini files in config directory defines the configurable parameters for the application # # ini files in config directory defines the configurable parameters for the application
# # they can all be overridden by environment variables # # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package) # # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
@@ -51,3 +65,4 @@ COPY cic-eth/cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY cic-eth/crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/ COPY cic-eth/crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
COPY util/liveness/health.sh /usr/local/bin/health.sh COPY util/liveness/health.sh /usr/local/bin/health.sh

View File

@@ -1,24 +1,25 @@
cic-base~=0.1.2b11 cic-base==0.1.3a3+build.984b5cff
celery==4.4.7 celery==4.4.7
crypto-dev-signer~=0.4.14b3 crypto-dev-signer~=0.4.14b6
confini~=0.3.6rc3 confini~=0.3.6rc3
cic-eth-registry~=0.5.5a4 cic-eth-registry~=0.5.6a1
redis==3.5.3 redis==3.5.3
alembic==1.4.2 alembic==1.4.2
websockets==8.1 websockets==8.1
requests~=2.24.0 requests~=2.24.0
eth_accounts_index~=0.0.11a12 eth_accounts_index~=0.0.12a1
erc20-transfer-authorization~=0.3.1a6 erc20-transfer-authorization~=0.3.2a1
uWSGI==2.0.19.1 uWSGI==2.0.19.1
semver==2.13.0 semver==2.13.0
websocket-client==0.57.0 websocket-client==0.57.0
moolb~=0.1.1b2 moolb~=0.1.1b2
eth-address-index~=0.1.1a11 eth-address-index~=0.1.2a1
chainlib~=0.0.3a2 chainlib-eth~=0.0.5a1
hexathon~=0.0.1a7 hexathon~=0.0.1a7
chainsyncer[sql]~=0.0.2a4 chainsyncer[sql]~=0.0.3a3
chainqueue~=0.0.2a2 chainqueue~=0.0.2b5
sarafu-faucet==0.0.3a3 sarafu-faucet~=0.0.4a1
erc20-faucet==0.2.1a4 erc20-faucet~=0.2.2a1
coincurve==15.0.0 coincurve==15.0.0
potaahto~=0.0.1a2 potaahto~=0.0.1a2
pycryptodome==3.10.1

View File

@@ -2,6 +2,8 @@
import os import os
import argparse import argparse
import logging import logging
import re
import sys
import alembic import alembic
from alembic.config import Config as AlembicConfig from alembic.config import Config as AlembicConfig
@@ -23,6 +25,8 @@ argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file') argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory') argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
argparser.add_argument('-f', action='store_true', help='force action')
argparser.add_argument('-v', action='store_true', help='be verbose') argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose') argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args() args = argparser.parse_args()
@@ -53,4 +57,10 @@ ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', dsn) ac.set_main_option('sqlalchemy.url', dsn)
ac.set_main_option('script_location', migrations_dir) ac.set_main_option('script_location', migrations_dir)
if args.reset:
if not args.f:
if not re.match(r'[yY][eE]?[sS]?', input('EEK! this will DELETE the existing db. are you sure??')):
logg.error('user chickened out on requested reset, bailing')
sys.exit(1)
alembic.command.downgrade(ac, 'base')
alembic.command.upgrade(ac, 'head') alembic.command.upgrade(ac, 'head')

View File

@@ -11,17 +11,6 @@ while True:
requirements.append(l.rstrip()) requirements.append(l.rstrip())
f.close() f.close()
test_requirements = []
f = open('test_requirements.txt', 'r')
while True:
l = f.readline()
if l == '':
break
test_requirements.append(l.rstrip())
f.close()
setup( setup(
install_requires=requirements, install_requires=requirements
tests_require=test_requirements,
) )

View File

@@ -4,4 +4,3 @@ pytest-mock==3.3.1
pytest-cov==2.10.1 pytest-cov==2.10.1
eth-tester==0.5.0b3 eth-tester==0.5.0b3
py-evm==0.3.0a20 py-evm==0.3.0a20
giftable-erc20-token==0.0.8a9

View File

@@ -0,0 +1,8 @@
# local imports
from cic_eth.check.db import health
def test_check_health(
        init_database,
        ):
    # A freshly initialized database must pass the db health check.
    ok = health()
    assert ok

View File

@@ -0,0 +1,20 @@
# local imports
from cic_eth.check.gas import health
from cic_eth.db.models.role import AccountRole
def test_check_gas(
        config,
        init_database,
        default_chain_spec,
        eth_rpc,
        custodial_roles,
        whoever,
        ):
    # The gas health check passes while the configured GAS_GIFTER account
    # balance exceeds the (deliberately tiny) minimum set here.
    config.add(str(default_chain_spec), 'CIC_CHAIN_SPEC', exists_ok=True)
    config.add(100, 'ETH_GAS_GIFTER_MINIMUM_BALANCE', exists_ok=True)
    assert health(config=config)
    # Reassign the GAS_GIFTER role to a different address ('whoever',
    # presumably unfunded — TODO confirm fixture semantics); the check
    # must then fail.
    AccountRole.set('GAS_GIFTER', whoever, session=init_database)
    init_database.commit()
    assert not health(config=config)

View File

@@ -0,0 +1,16 @@
# external imports
import pytest
# local imports
from cic_eth.check.redis import health
def test_check_redis(
        config,
        have_redis,
        ):
    # have_redis carries the connection error (if any) from the session
    # fixture; skip rather than fail when redis is unreachable.
    if have_redis is not None:
        pytest.skip('cannot connect to redis, skipping test: {}'.format(have_redis))
    assert health(unit='test', config=config)

View File

@@ -0,0 +1,13 @@
# local imports
from cic_eth.check.signer import health
def test_check_signer(
        default_chain_spec,
        config,
        eth_signer,
        eth_rpc,
        ):
    # The signer health check needs the chain spec in config to reach the
    # signer backend.
    config.add(str(default_chain_spec), 'CIC_CHAIN_SPEC', exists_ok=True)
    result = health(config=config)
    assert result

View File

@@ -2,9 +2,11 @@
import os import os
import sys import sys
import logging import logging
import uuid
# external imports # external imports
from chainlib.eth.erc20 import ERC20 from eth_erc20 import ERC20
import redis
# local imports # local imports
from cic_eth.api import Api from cic_eth.api import Api
@@ -15,10 +17,11 @@ root_dir = os.path.dirname(script_dir)
sys.path.insert(0, root_dir) sys.path.insert(0, root_dir)
# assemble fixtures # assemble fixtures
from tests.fixtures_config import * from cic_eth.pytest.fixtures_config import *
from tests.fixtures_database import * from cic_eth.pytest.fixtures_celery import *
from tests.fixtures_celery import * from cic_eth.pytest.fixtures_database import *
from tests.fixtures_role import * from cic_eth.pytest.fixtures_role import *
from cic_eth.pytest.fixtures_contract import *
from chainlib.eth.pytest import * from chainlib.eth.pytest import *
from eth_contract_registry.pytest import * from eth_contract_registry.pytest import *
from cic_eth_registry.pytest.fixtures_contracts import * from cic_eth_registry.pytest.fixtures_contracts import *
@@ -55,3 +58,28 @@ def default_token(
): ):
BaseTask.default_token_symbol = foo_token_symbol BaseTask.default_token_symbol = foo_token_symbol
BaseTask.default_token_address = foo_token BaseTask.default_token_address = foo_token
@pytest.fixture(scope='session')
def have_redis(
        config,
        ):
    """Probe the configured redis instance once per session.

    Returns None when a set/delete round-trip succeeds, otherwise the
    exception raised, so dependent tests can skip with a reason.
    """
    client = redis.Redis(
            host=config.get('REDIS_HOST'),
            port=config.get('REDIS_PORT'),
            db=config.get('REDIS_DB'),
            )
    probe_key = str(uuid.uuid4())
    try:
        client.set(probe_key, 'foo')
        client.delete(probe_key)
    except (redis.exceptions.ConnectionError, TypeError) as e:
        return e
    return None

View File

@@ -14,9 +14,9 @@ from chainlib.eth.tx import (
Tx, Tx,
) )
from chainlib.eth.block import Block from chainlib.eth.block import Block
from chainlib.eth.erc20 import ERC20 from eth_erc20 import ERC20
from sarafu_faucet import MinterFaucet from sarafu_faucet import MinterFaucet
from eth_accounts_index import AccountRegistry from eth_accounts_index.registry import AccountRegistry
from potaahto.symbols import snake_and_camel from potaahto.symbols import snake_and_camel
from hexathon import add_0x from hexathon import add_0x
@@ -26,7 +26,6 @@ from cic_eth.runnable.daemons.filters.callback import CallbackFilter
logg = logging.getLogger() logg = logging.getLogger()
@pytest.mark.skip()
def test_transfer_tx( def test_transfer_tx(
default_chain_spec, default_chain_spec,
init_database, init_database,
@@ -66,7 +65,6 @@ def test_transfer_tx(
assert transfer_type == 'transfer' assert transfer_type == 'transfer'
@pytest.mark.skip()
def test_transfer_from_tx( def test_transfer_from_tx(
default_chain_spec, default_chain_spec,
init_database, init_database,

View File

@@ -0,0 +1,38 @@
# local imports
from cic_eth.runnable.daemons.filters.gas import GasFilter
from cic_eth.runnable.daemons.filters.transferauth import TransferAuthFilter
from cic_eth.runnable.daemons.filters.callback import CallbackFilter
from cic_eth.runnable.daemons.filters.straggler import StragglerFilter
from cic_eth.runnable.daemons.filters.tx import TxFilter
from cic_eth.runnable.daemons.filters.register import RegistrationFilter
# Hit tx mismatch paths on all filters
def test_filter_bogus(
        init_database,
        bogus_tx_block,
        default_chain_spec,
        eth_rpc,
        eth_signer,
        transfer_auth,
        cic_registry,
        contract_roles,
        register_lookups,
        ):
    """Feed a non-matching (bogus) tx to every filter; none may match.

    A filter may either return a falsy value or raise on a tx it does not
    recognize — both count as "did not match".
    """
    fltrs = [
        TransferAuthFilter(cic_registry, default_chain_spec, eth_rpc, call_address=contract_roles['CONTRACT_DEPLOYER']),
        GasFilter(default_chain_spec, queue=None),
        TxFilter(default_chain_spec, None),
        CallbackFilter(default_chain_spec, None, None, caller_address=contract_roles['CONTRACT_DEPLOYER']),
        StragglerFilter(default_chain_spec, None),
        RegistrationFilter(default_chain_spec, queue=None),
        ]
    for fltr in fltrs:
        r = None
        try:
            r = fltr.filter(eth_rpc, bogus_tx_block[0], bogus_tx_block[1], db_session=init_database)
        except Exception:
            # Narrowed from a bare 'except:', which would also have
            # swallowed KeyboardInterrupt/SystemExit and hidden test aborts.
            pass
        assert not r

View File

@@ -0,0 +1,101 @@
# external imports
from chainlib.connection import RPCConnection
from chainlib.eth.nonce import OverrideNonceOracle
from chainqueue.sql.tx import create as queue_create
from chainlib.eth.tx import (
TxFormat,
unpack,
Tx,
)
from chainlib.eth.gas import (
Gas,
OverrideGasOracle,
)
from chainlib.eth.block import (
block_latest,
block_by_number,
Block,
)
from chainqueue.sql.state import (
set_waitforgas,
)
from hexathon import strip_0x
from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusBits
# local imports
from cic_eth.runnable.daemons.filters.gas import GasFilter
from cic_eth.eth.gas import cache_gas_data
def test_filter_gas(
        default_chain_spec,
        init_database,
        eth_rpc,
        eth_signer,
        agent_roles,
        celery_session_worker,
        ):
    # First tx: queue it and explicitly flag it as waiting for gas.
    rpc = RPCConnection.connect(default_chain_spec, 'default')
    nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42)
    gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)
    c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
    (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
    queue_create(
        default_chain_spec,
        42,
        agent_roles['ALICE'],
        tx_hash_hex,
        tx_signed_raw_hex,
        session=init_database,
        )
    cache_gas_data(
        tx_hash_hex,
        tx_signed_raw_hex,
        default_chain_spec.asdict(),
        )
    set_waitforgas(default_chain_spec, tx_hash_hex, session=init_database)
    init_database.commit()

    # Precondition: the queued tx carries the GAS_ISSUES status bit.
    tx_hash_hex_wait = tx_hash_hex
    otx = Otx.load(tx_hash_hex_wait, session=init_database)
    assert otx.status & StatusBits.GAS_ISSUES == StatusBits.GAS_ISSUES

    # Second tx: a gas transfer BOB -> ALICE, also queued; this is the tx
    # the filter will observe in the synthesized block below.
    c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
    (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['BOB'], agent_roles['ALICE'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
    queue_create(
        default_chain_spec,
        43,
        agent_roles['BOB'],
        tx_hash_hex,
        tx_signed_raw_hex,
        session=init_database,
        )
    cache_gas_data(
        tx_hash_hex,
        tx_signed_raw_hex,
        default_chain_spec.asdict(),
        )

    # Build a Block/Tx pair for the second tx and run it through the filter.
    fltr = GasFilter(default_chain_spec, queue=None)

    o = block_latest()
    r = eth_rpc.do(o)
    o = block_by_number(r, include_tx=False)
    r = eth_rpc.do(o)
    block = Block(r)
    block.txs = [tx_hash_hex]

    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx_src = unpack(tx_signed_raw_bytes, default_chain_spec)
    tx = Tx(tx_src, block=block)

    # Filter returns a celery async result; wait for the task chain.
    t = fltr.filter(eth_rpc, block, tx, db_session=init_database)
    t.get_leaf()
    assert t.successful()

    init_database.commit()

    # The gas filter should have re-queued the tx that was waiting for gas.
    otx = Otx.load(tx_hash_hex_wait, session=init_database)
    assert otx.status & StatusBits.QUEUED == StatusBits.QUEUED

View File

@@ -0,0 +1,78 @@
# external imports
from eth_accounts_index.registry import AccountRegistry
from chainlib.connection import RPCConnection
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.tx import(
receipt,
unpack,
Tx,
)
from chainlib.eth.block import (
block_latest,
block_by_number,
Block,
)
from erc20_faucet import Faucet
from hexathon import strip_0x
from chainqueue.sql.query import get_account_tx
# local imports
from cic_eth.runnable.daemons.filters.register import RegistrationFilter
def test_register_filter(
        default_chain_spec,
        init_database,
        eth_rpc,
        eth_signer,
        account_registry,
        faucet,
        register_lookups,
        contract_roles,
        agent_roles,
        cic_registry,
        init_celery_tasks,
        celery_session_worker,
        caplog,
        ):
    # Register ALICE in the on-chain account registry and confirm the tx.
    nonce_oracle = RPCNonceOracle(contract_roles['ACCOUNT_REGISTRY_WRITER'], conn=eth_rpc)
    gas_oracle = OverrideGasOracle(limit=AccountRegistry.gas(), conn=eth_rpc)
    c = AccountRegistry(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
    (tx_hash_hex, o) = c.add(account_registry, contract_roles['ACCOUNT_REGISTRY_WRITER'], agent_roles['ALICE'])
    r = eth_rpc.do(o)

    # Keep the raw signed tx so it can be unpacked into a Tx object below.
    tx_signed_raw_bytes = bytes.fromhex(strip_0x(o['params'][0]))
    o = receipt(tx_hash_hex)
    rcpt = eth_rpc.do(o)
    assert rcpt['status'] == 1

    # Reconstruct the mined block/tx pair for the filter.
    o = block_latest()
    r = eth_rpc.do(o)
    o = block_by_number(r, include_tx=False)
    r = eth_rpc.do(o)
    block = Block(r)
    block.txs = [tx_hash_hex]

    tx_src = unpack(tx_signed_raw_bytes, default_chain_spec)
    tx = Tx(tx_src, block=block, rcpt=rcpt)
    tx.apply_receipt(rcpt)

    # Run the registration filter; it returns a celery async result.
    fltr = RegistrationFilter(default_chain_spec, queue=None)
    t = fltr.filter(eth_rpc, block, tx, db_session=init_database)
    t.get_leaf()
    assert t.successful()

    # The filter should have queued exactly one faucet gift tx to ALICE.
    gift_txs = get_account_tx(default_chain_spec.asdict(), agent_roles['ALICE'], as_sender=True, session=init_database)
    ks = list(gift_txs.keys())
    assert len(ks) == 1

    tx_raw_signed_hex = strip_0x(gift_txs[ks[0]])
    tx_raw_signed_bytes = bytes.fromhex(tx_raw_signed_hex)
    gift_tx = unpack(tx_raw_signed_bytes, default_chain_spec)

    # Decode the giveTo calldata and verify the beneficiary.
    gift = Faucet.parse_give_to_request(gift_tx['data'])
    assert gift[0] == agent_roles['ALICE']

View File

@@ -17,8 +17,8 @@ from chainlib.eth.block import (
) )
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusBits from chainqueue.db.enum import StatusBits
from chainqueue.tx import create as queue_create from chainqueue.sql.tx import create as queue_create
from chainqueue.state import ( from chainqueue.sql.state import (
set_reserved, set_reserved,
set_ready, set_ready,
set_sent, set_sent,

View File

@@ -0,0 +1,79 @@
# external imports
from erc20_transfer_authorization import TransferAuthorization
from eth_erc20 import ERC20
from chainlib.connection import RPCConnection
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.tx import (
receipt,
unpack,
Tx,
)
from chainlib.eth.block import (
block_latest,
block_by_number,
Block,
)
from hexathon import strip_0x
from chainqueue.sql.query import get_account_tx
# local imports
from cic_eth.runnable.daemons.filters.transferauth import TransferAuthFilter
def test_filter_transferauth(
        default_chain_spec,
        init_database,
        eth_rpc,
        eth_signer,
        agent_roles,
        contract_roles,
        transfer_auth,
        foo_token,
        celery_session_worker,
        register_lookups,
        init_custodial,
        cic_registry,
        ):
    # Submit a transfer authorization request (ALICE -> BOB, 1024 foo_token)
    # to the on-chain TransferAuthorization contract and confirm it mined.
    rpc = RPCConnection.connect(default_chain_spec, 'default')
    nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc)
    gas_oracle = OverrideGasOracle(limit=200000, conn=eth_rpc)
    c = TransferAuthorization(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)

    (tx_hash_hex, o) = c.create_request(transfer_auth, contract_roles['CONTRACT_DEPLOYER'], agent_roles['ALICE'], agent_roles['BOB'], foo_token, 1024)
    r = rpc.do(o)

    # Keep the raw signed tx for unpacking into a Tx object below.
    tx_signed_raw_bytes = bytes.fromhex(strip_0x(o['params'][0]))
    o = receipt(tx_hash_hex)
    r = rpc.do(o)
    assert r['status'] == 1

    # Reconstruct the mined block/tx pair for the filter.
    o = block_latest()
    r = eth_rpc.do(o)
    o = block_by_number(r, include_tx=False)
    r = eth_rpc.do(o)
    block = Block(r)
    block.txs = [tx_hash_hex]

    #tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx_src = unpack(tx_signed_raw_bytes, default_chain_spec)
    tx = Tx(tx_src, block=block)

    # Run the transfer-auth filter; it returns a celery async result.
    fltr = TransferAuthFilter(cic_registry, default_chain_spec, eth_rpc, call_address=contract_roles['CONTRACT_DEPLOYER'])
    t = fltr.filter(eth_rpc, block, tx, db_session=init_database)
    t.get_leaf()
    assert t.successful()

    # The filter should have queued exactly one ERC20 approve tx from ALICE.
    approve_txs = get_account_tx(default_chain_spec.asdict(), agent_roles['ALICE'], as_sender=True, session=init_database)
    ks = list(approve_txs.keys())
    assert len(ks) == 1

    tx_raw_signed_hex = strip_0x(approve_txs[ks[0]])
    tx_raw_signed_bytes = bytes.fromhex(tx_raw_signed_hex)
    approve_tx = unpack(tx_raw_signed_bytes, default_chain_spec)

    # Decode the approve calldata and verify BOB is the spender.
    c = ERC20(default_chain_spec)
    approve = c.parse_approve_request(approve_tx['data'])
    assert approve[0] == agent_roles['BOB']

View File

@@ -17,13 +17,12 @@ from chainlib.eth.block import (
) )
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusBits from chainqueue.db.enum import StatusBits
from chainqueue.tx import create as queue_create from chainqueue.sql.tx import create as queue_create
from chainqueue.state import ( from chainqueue.sql.state import (
set_reserved, set_reserved,
set_ready, set_ready,
set_sent, set_sent,
) )
from hexathon import strip_0x from hexathon import strip_0x
# local imports # local imports
@@ -31,7 +30,7 @@ from cic_eth.runnable.daemons.filters.tx import TxFilter
from cic_eth.eth.gas import cache_gas_data from cic_eth.eth.gas import cache_gas_data
def test_tx( def test_filter_tx(
default_chain_spec, default_chain_spec,
init_database, init_database,
eth_rpc, eth_rpc,

View File

@@ -9,8 +9,14 @@ from chainlib.eth.tx import (
unpack, unpack,
TxFormat, TxFormat,
) )
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import (
from chainlib.eth.gas import Gas RPCNonceOracle,
OverrideNonceOracle,
)
from chainlib.eth.gas import (
Gas,
OverrideGasOracle,
)
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from hexathon import ( from hexathon import (
strip_0x, strip_0x,
@@ -23,7 +29,15 @@ from chainqueue.db.enum import (
StatusBits, StatusBits,
status_str, status_str,
) )
from chainqueue.query import get_tx from chainqueue.sql.state import (
set_fubar,
set_ready,
set_reserved,
)
from chainqueue.sql.query import (
get_tx,
get_nonce_tx_cache,
)
# local imports # local imports
from cic_eth.api import AdminApi from cic_eth.api import AdminApi
@@ -36,150 +50,6 @@ from cic_eth.queue.tx import queue_create
logg = logging.getLogger() logg = logging.getLogger()
#def test_resend_inplace(
# default_chain_spec,
# init_database,
# init_w3,
# celery_session_worker,
# ):
#
# chain_str = str(default_chain_spec)
# c = RpcClient(default_chain_spec)
#
# sigs = []
#
# gas_provider = c.gas_provider()
#
# s_nonce = celery.signature(
# 'cic_eth.eth.nonce.reserve_nonce',
# [
# init_w3.eth.accounts[0],
# gas_provider,
# ],
# queue=None,
# )
# s_refill = celery.signature(
# 'cic_eth.eth.gas.refill_gas',
# [
# chain_str,
# ],
# queue=None,
# )
# s_nonce.link(s_refill)
# t = s_nonce.apply_async()
# t.get()
# for r in t.collect():
# pass
# assert t.successful()
#
# q = init_database.query(Otx)
# q = q.join(TxCache)
# q = q.filter(TxCache.recipient==init_w3.eth.accounts[0])
# o = q.first()
# tx_raw = o.signed_tx
#
# tx_dict = unpack(bytes.fromhex(tx_raw), default_chain_spec)
# gas_price_before = tx_dict['gasPrice']
#
# s = celery.signature(
# 'cic_eth.admin.ctrl.lock_send',
# [
# chain_str,
# init_w3.eth.accounts[0],
# ],
# queue=None,
# )
# t = s.apply_async()
# t.get()
# assert t.successful()
#
# api = AdminApi(c, queue=None)
# t = api.resend(tx_dict['hash'], chain_str, unlock=True)
# t.get()
# i = 0
# tx_hash_new_hex = None
# for r in t.collect():
# tx_hash_new_hex = r[1]
# assert t.successful()
#
# tx_raw_new = get_tx(tx_hash_new_hex)
# logg.debug('get {}'.format(tx_raw_new))
# tx_dict_new = unpack(bytes.fromhex(tx_raw_new['signed_tx']), default_chain_spec)
# assert tx_hash_new_hex != tx_dict['hash']
# assert tx_dict_new['gasPrice'] > gas_price_before
#
# tx_dict_after = get_tx(tx_dict['hash'])
#
# logg.debug('logggg {}'.format(status_str(tx_dict_after['status'])))
# assert tx_dict_after['status'] & StatusBits.MANUAL
#def test_check_fix_nonce(
# default_chain_spec,
# init_database,
# init_eth_account_roles,
# init_w3,
# eth_empty_accounts,
# celery_session_worker,
# ):
#
# chain_str = str(default_chain_spec)
#
# sigs = []
# for i in range(5):
# s = celery.signature(
# 'cic_eth.eth.gas.refill_gas',
# [
# eth_empty_accounts[i],
# chain_str,
# ],
# queue=None,
# )
# sigs.append(s)
#
# t = celery.group(sigs)()
# txs = t.get()
# assert t.successful()
#
# tx_hash = web3.Web3.keccak(hexstr=txs[2])
# c = RpcClient(default_chain_spec)
# api = AdminApi(c, queue=None)
# address = init_eth_account_roles['eth_account_gas_provider']
# nonce_spec = api.check_nonce(address)
# assert nonce_spec['nonce']['network'] == 0
# assert nonce_spec['nonce']['queue'] == 4
# assert nonce_spec['nonce']['blocking'] == None
#
# s_set = celery.signature(
# 'cic_eth.queue.tx.set_rejected',
# [
# tx_hash.hex(),
# ],
# queue=None,
# )
# t = s_set.apply_async()
# t.get()
# t.collect()
# assert t.successful()
#
#
# nonce_spec = api.check_nonce(address)
# assert nonce_spec['nonce']['blocking'] == 2
# assert nonce_spec['tx']['blocking'] == tx_hash.hex()
#
# t = api.fix_nonce(address, nonce_spec['nonce']['blocking'])
# t.get()
# t.collect()
# assert t.successful()
#
# for tx in txs[3:]:
# tx_hash = web3.Web3.keccak(hexstr=tx)
# tx_dict = get_tx(tx_hash.hex())
# assert tx_dict['status'] == StatusEnum.OVERRIDDEN
#
#
def test_have_account( def test_have_account(
default_chain_spec, default_chain_spec,
custodial_roles, custodial_roles,
@@ -243,28 +113,6 @@ def test_tag_account(
assert AccountRole.get_address('bar', init_database) == agent_roles['CAROL'] assert AccountRole.get_address('bar', init_database) == agent_roles['CAROL']
#def test_ready(
# init_database,
# agent_roles,
# eth_rpc,
# ):
#
# api = AdminApi(eth_rpc)
#
# with pytest.raises(InitializationError):
# api.ready()
#
# bogus_account = os.urandom(20)
# bogus_account_hex = '0x' + bogus_account.hex()
#
# api.tag_account('ETH_GAS_PROVIDER_ADDRESS', web3.Web3.toChecksumAddress(bogus_account_hex))
# with pytest.raises(KeyError):
# api.ready()
#
# api.tag_account('ETH_GAS_PROVIDER_ADDRESS', eth_empty_accounts[0])
# api.ready()
def test_tx( def test_tx(
default_chain_spec, default_chain_spec,
cic_registry, cic_registry,
@@ -286,3 +134,168 @@ def test_tx(
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['DEFAULT']) api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['DEFAULT'])
tx = api.tx(default_chain_spec, tx_hash=tx_hash_hex) tx = api.tx(default_chain_spec, tx_hash=tx_hash_hex)
logg.warning('code missing to verify tx contents {}'.format(tx)) logg.warning('code missing to verify tx contents {}'.format(tx))
def test_check_nonce_gap(
default_chain_spec,
init_database,
eth_rpc,
eth_signer,
agent_roles,
contract_roles,
celery_session_worker,
caplog,
):
# NOTE: this only works as long as agents roles start at nonce 0
nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 0)
gas_oracle = OverrideGasOracle(limit=21000, conn=eth_rpc)
tx_hashes = []
txs = []
j = 0
for i in range(10):
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
if i == 3:
j = 1
nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], i+1)
queue_create(
default_chain_spec,
i+j,
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
tx_hashes.append(tx_hash_hex)
txs.append(tx_signed_raw_hex)
init_database.commit()
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['DEFAULT'])
r = api.check_nonce(default_chain_spec, agent_roles['ALICE'])
assert r['nonce']['blocking'] == 4
assert r['tx']['blocking'] == tx_hashes[3] # one less because there is a gap
def test_check_nonce_localfail(
default_chain_spec,
init_database,
eth_rpc,
eth_signer,
agent_roles,
contract_roles,
celery_session_worker,
caplog,
):
# NOTE: this only works as long as agents roles start at nonce 0
nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 0)
gas_oracle = OverrideGasOracle(limit=21000, conn=eth_rpc)
tx_hashes = []
txs = []
j = 0
for i in range(10):
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
i,
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
tx_hashes.append(tx_hash_hex)
txs.append(tx_signed_raw_hex)
set_ready(default_chain_spec, tx_hashes[4], session=init_database)
set_reserved(default_chain_spec, tx_hashes[4], session=init_database)
set_fubar(default_chain_spec, tx_hashes[4], session=init_database)
init_database.commit()
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['DEFAULT'])
r = api.check_nonce(default_chain_spec, agent_roles['ALICE'])
assert r['nonce']['blocking'] == 4
assert r['tx']['blocking'] == tx_hashes[4]
def test_fix_nonce(
default_chain_spec,
init_database,
eth_rpc,
eth_signer,
agent_roles,
contract_roles,
celery_session_worker,
init_celery_tasks,
caplog,
):
nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 0)
gas_oracle = OverrideGasOracle(limit=21000, conn=eth_rpc)
tx_hashes = []
txs = []
for i in range(10):
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
i,
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
tx_hashes.append(tx_hash_hex)
txs.append(tx_signed_raw_hex)
init_database.commit()
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['DEFAULT'])
t = api.fix_nonce(default_chain_spec, agent_roles['ALICE'], 3)
r = t.get_leaf()
assert t.successful()
init_database.commit()
txs = get_nonce_tx_cache(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
ks = txs.keys()
assert len(ks) == 2
for k in ks:
hsh = add_0x(k)
otx = Otx.load(hsh, session=init_database)
init_database.refresh(otx)
logg.debug('checking nonce {} tx {} status {}'.format(3, otx.tx_hash, otx.status))
if add_0x(k) == tx_hashes[3]:
assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE
else:
assert otx.status == 1

View File

@@ -0,0 +1,373 @@
# standard imports
import logging
import io
import json
# external imports
import pytest
from chainlib.connection import RPCConnection
from chainlib.eth.nonce import (
nonce,
OverrideNonceOracle,
RPCNonceOracle,
)
from chainqueue.sql.tx import create as queue_create
from chainlib.eth.tx import (
raw,
receipt,
TxFormat,
Tx,
)
from chainlib.eth.block import block_latest
from chainlib.eth.gas import (
Gas,
OverrideGasOracle,
)
from chainqueue.sql.state import (
set_reserved,
set_sent,
set_ready,
)
from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusBits
from chainqueue.sql.query import get_nonce_tx_cache
from eth_erc20 import ERC20
from cic_eth_registry import CICRegistry
# local imports
from cic_eth.api.api_admin import AdminApi
from cic_eth.eth.gas import cache_gas_data
from cic_eth.eth.erc20 import cache_transfer_data
logg = logging.getLogger()
def test_admin_api_tx(
default_chain_spec,
init_database,
init_celery_tasks,
eth_rpc,
eth_signer,
agent_roles,
contract_roles,
custodial_roles,
celery_session_worker,
foo_token,
address_declarator,
cic_registry,
register_tokens,
register_lookups,
caplog,
):
nonce_oracle = RPCNonceOracle(custodial_roles['FOO_TOKEN_GIFTER'], conn=eth_rpc)
gas_oracle = OverrideGasOracle(limit=100000, conn=eth_rpc)
o = nonce(custodial_roles['FOO_TOKEN_GIFTER'])
r = eth_rpc.do(o)
gifter_nonce = int(r, 16)
#c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.transfer(foo_token, custodial_roles['FOO_TOKEN_GIFTER'], agent_roles['ALICE'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
gifter_nonce, # will only work if agent starts at 0
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_transfer_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
init_database.commit()
o = raw(tx_signed_raw_hex)
eth_rpc.do(o)
o = receipt(tx_hash_hex)
r = eth_rpc.do(o)
assert r['status'] == 1
set_ready(default_chain_spec, tx_hash_hex, session=init_database)
set_reserved(default_chain_spec, tx_hash_hex, session=init_database)
set_sent(default_chain_spec, tx_hash_hex, session=init_database)
# lookup by transaction hash, without registry
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])
tx = api.tx(default_chain_spec, tx_hash=tx_hash_hex)
logg.debug('deployed {}'.format(contract_roles['CONTRACT_DEPLOYER']))
assert tx['tx_hash'] == tx_hash_hex
# lookup by RLP transaction, without registry
tx = api.tx(default_chain_spec, tx_raw=tx_signed_raw_hex)
assert tx['tx_hash'] == tx_hash_hex
# lookup by transaction hash, with registry
registry = CICRegistry(default_chain_spec, eth_rpc)
tx = api.tx(default_chain_spec, tx_hash=tx_hash_hex, registry=registry)
assert tx['tx_hash'] == tx_hash_hex
# lookup by transaction hash, using writer
buf = io.StringIO()
api.tx(default_chain_spec, tx_hash=tx_hash_hex, renderer=json.dumps, w=buf)
tx = json.loads(buf.getvalue())
assert tx['tx_hash'] == tx_hash_hex
def test_admin_api_account(
default_chain_spec,
init_database,
eth_rpc,
eth_signer,
agent_roles,
contract_roles,
celery_session_worker,
caplog,
):
nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42)
gas_oracle = OverrideGasOracle(limit=21000, conn=eth_rpc)
tx_hashes_alice = []
txs_alice = []
for i in range(3):
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
42+i,
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
tx_hashes_alice.append(tx_hash_hex)
txs_alice.append(tx_signed_raw_hex)
init_database.commit()
nonce_oracle = OverrideNonceOracle(agent_roles['BOB'], 13)
tx_hashes_bob = []
txs_bob = []
for i in range(2):
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['BOB'], agent_roles['ALICE'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
13+i,
agent_roles['BOB'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
tx_hashes_bob.append(tx_hash_hex)
txs_bob.append(tx_signed_raw_hex)
init_database.commit()
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])
r = api.account(default_chain_spec, agent_roles['ALICE'])
assert len(r) == 5
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])
r = api.account(default_chain_spec, agent_roles['ALICE'], include_sender=False)
assert len(r) == 2
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])
r = api.account(default_chain_spec, agent_roles['ALICE'], include_recipient=False)
assert len(r) == 3
def test_admin_api_account_writer(
default_chain_spec,
init_database,
eth_rpc,
eth_signer,
agent_roles,
contract_roles,
celery_session_worker,
caplog,
):
nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42)
gas_oracle = OverrideGasOracle(limit=21000, conn=eth_rpc)
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
42,
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
init_database.commit()
buf = io.StringIO()
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])
api.account(default_chain_spec, agent_roles['ALICE'], renderer=json.dumps, w=buf)
# TODO: improve eval
tx = json.loads(buf.getvalue())
assert tx['tx_hash'] == tx_hash_hex
def test_registry(
eth_rpc,
cic_registry,
contract_roles,
celery_session_worker,
):
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])
t = api.registry()
r = t.get_leaf()
assert r == cic_registry
def test_proxy_do(
default_chain_spec,
eth_rpc,
contract_roles,
celery_session_worker,
):
o = block_latest()
r = eth_rpc.do(o)
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])
t = api.proxy_do(default_chain_spec, o)
rr = t.get_leaf()
assert r == rr
def test_resend_inplace(
init_database,
default_chain_spec,
eth_rpc,
eth_signer,
agent_roles,
contract_roles,
celery_session_worker,
):
rpc = RPCConnection.connect(default_chain_spec, 'default')
nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42)
gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
42,
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
set_ready(default_chain_spec, tx_hash_hex, session=init_database)
set_reserved(default_chain_spec, tx_hash_hex, session=init_database)
set_sent(default_chain_spec, tx_hash_hex, session=init_database)
init_database.commit()
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])
t = api.resend(tx_hash_hex, default_chain_spec, unlock=True)
r = t.get_leaf()
assert t.successful()
otx = Otx.load(tx_hash_hex, session=init_database)
assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE
txs = get_nonce_tx_cache(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
assert len(txs) == 2
@pytest.mark.xfail()
def test_resend_clone(
init_database,
default_chain_spec,
eth_rpc,
eth_signer,
agent_roles,
contract_roles,
celery_session_worker,
):
rpc = RPCConnection.connect(default_chain_spec, 'default')
nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42)
gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
42,
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
set_ready(default_chain_spec, tx_hash_hex, session=init_database)
set_reserved(default_chain_spec, tx_hash_hex, session=init_database)
set_sent(default_chain_spec, tx_hash_hex, session=init_database)
init_database.commit()
api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['CONTRACT_DEPLOYER'])
t = api.resend(tx_hash_hex, default_chain_spec, in_place=False)
r = t.get_leaf()
assert t.successful()
otx = Otx.load(tx_hash_hex, session=init_database)
assert otx.status & StatusBits.IN_NETWORK == StatusBits.IN_NETWORK
assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE
txs = get_nonce_tx_cache(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
assert len(txs) == 1
txs = get_nonce_tx_cache(default_chain_spec, otx.nonce + 1, agent_roles['ALICE'], session=init_database)
assert len(txs) == 1
otx = Otx.load(txs[0], session=init_database)
assert otx.status == 0

View File

@@ -8,11 +8,20 @@ import pytest
import celery import celery
from cic_eth_registry.erc20 import ERC20Token from cic_eth_registry.erc20 import ERC20Token
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from eth_accounts_index import AccountsIndex
from chainlib.eth.tx import (
transaction,
)
from chainqueue.sql.state import (
set_reserved,
)
# local imports # local imports
from cic_eth.api import Api from cic_eth.api import Api
from cic_eth.queue.query import get_tx
logg = logging.getLogger(__name__) #logg = logging.getLogger(__name__)
logg = logging.getLogger()
def test_account_api( def test_account_api(
@@ -29,6 +38,47 @@ def test_account_api(
assert t.successful() assert t.successful()
def test_account_api_register(
default_chain_spec,
init_database,
account_registry,
faucet,
custodial_roles,
cic_registry,
register_lookups,
eth_rpc,
celery_session_worker,
):
api = Api(str(default_chain_spec), callback_param='accounts', callback_task='cic_eth.callbacks.noop.noop', queue=None)
t = api.create_account('')
register_tx_hash = t.get_leaf()
assert t.successful()
set_reserved(default_chain_spec, register_tx_hash, session=init_database)
tx = get_tx(default_chain_spec.asdict(), register_tx_hash)
s = celery.signature(
'cic_eth.eth.tx.send',
[
[tx['signed_tx']],
default_chain_spec.asdict(),
],
queue=None
)
t = s.apply_async()
r = t.get_leaf()
assert t.successful()
o = transaction(register_tx_hash)
tx_src = eth_rpc.do(o)
c = AccountsIndex(default_chain_spec)
address = c.parse_add_request(tx_src['data'])
o = c.have(account_registry, address[0], sender_address=custodial_roles['CONTRACT_DEPLOYER'])
r = eth_rpc.do(o)
assert c.parse_have(r)
def test_transfer_api( def test_transfer_api(
default_chain_spec, default_chain_spec,
eth_rpc, eth_rpc,
@@ -37,16 +87,15 @@ def test_transfer_api(
custodial_roles, custodial_roles,
agent_roles, agent_roles,
cic_registry, cic_registry,
register_tokens, token_registry,
register_lookups, register_lookups,
celery_session_worker, celery_session_worker,
register_tokens,
foo_token_symbol,
): ):
#token = CICRegistry.get_address(default_chain_spec, bancor_tokens[0])
foo_token_cache = ERC20Token(default_chain_spec, eth_rpc, foo_token)
api = Api(str(default_chain_spec), callback_param='transfer', callback_task='cic_eth.callbacks.noop.noop', queue=None) api = Api(str(default_chain_spec), callback_param='transfer', callback_task='cic_eth.callbacks.noop.noop', queue=None)
t = api.transfer(custodial_roles['FOO_TOKEN_GIFTER'], agent_roles['ALICE'], 1024, foo_token_cache.symbol) t = api.transfer(custodial_roles['FOO_TOKEN_GIFTER'], agent_roles['ALICE'], 1, foo_token_symbol)
t.get_leaf() t.get_leaf()
assert t.successful() assert t.successful()

View File

@@ -0,0 +1,19 @@
# local imports
from cic_eth.api.api_task import Api
from cic_eth.task import BaseTask
def test_default_token(
default_chain_spec,
foo_token,
default_token,
token_registry,
register_tokens,
register_lookups,
cic_registry,
celery_session_worker,
):
api = Api(str(default_chain_spec), queue=None)
t = api.default_token()
r = t.get_leaf()
assert r['address'] == foo_token

View File

@@ -4,7 +4,7 @@ import logging
# external imports # external imports
import pytest import pytest
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.erc20 import ERC20 from eth_erc20 import ERC20
from chainlib.eth.tx import receipt from chainlib.eth.tx import receipt
# local imports # local imports

View File

@@ -1 +1 @@
from tests.fixtures_celery import * from cic_eth.pytest.fixtures_celery import *

View File

@@ -9,7 +9,7 @@ import celery
from chainlib.connection import RPCConnection from chainlib.connection import RPCConnection
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import receipt from chainlib.eth.tx import receipt
from eth_accounts_index import AccountRegistry from eth_accounts_index.registry import AccountRegistry
from hexathon import strip_0x from hexathon import strip_0x
from chainqueue.db.enum import StatusEnum from chainqueue.db.enum import StatusEnum
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
@@ -156,6 +156,7 @@ def test_gift(
eth_signer, eth_signer,
init_celery_tasks, init_celery_tasks,
cic_registry, cic_registry,
register_lookups,
celery_session_worker, celery_session_worker,
): ):

View File

@@ -0,0 +1,88 @@
# standard imports
import logging
# external imports
import celery
from chainlib.connection import RPCConnection
from chainlib.eth.nonce import OverrideNonceOracle
from chainqueue.sql.tx import (
create as queue_create,
)
from chainlib.eth.gas import (
Gas,
OverrideGasOracle,
)
from chainlib.eth.tx import TxFormat
from chainqueue.sql.query import get_nonce_tx_cache
from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusBits
from hexathon import add_0x
# local imports
from cic_eth.admin.nonce import shift_nonce
from cic_eth.eth.gas import cache_gas_data
logg = logging.getLogger()
def test_shift_nonce(
default_chain_spec,
init_database,
eth_rpc,
eth_signer,
agent_roles,
celery_session_worker,
caplog,
):
nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42)
gas_oracle = OverrideGasOracle(limit=21000, conn=eth_rpc)
tx_hashes = []
txs = []
for i in range(10):
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
42+i,
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
tx_hashes.append(tx_hash_hex)
txs.append(tx_signed_raw_hex)
init_database.commit()
s = celery.signature(
'cic_eth.admin.nonce.shift_nonce',
[
default_chain_spec.asdict(),
tx_hashes[3],
],
queue=None
)
t = s.apply_async()
r = t.get_leaf()
assert t.successful()
init_database.commit()
for i in range(42+3, 42+10):
txs = get_nonce_tx_cache(default_chain_spec, i, agent_roles['ALICE'], session=init_database)
for k in txs.keys():
hsh = add_0x(k)
otx = Otx.load(hsh, session=init_database)
logg.debug('checking nonce {} tx {} status {}'.format(i, otx.tx_hash, otx.status))
if add_0x(k) == tx_hashes[i-42]:
assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE
else:
assert otx.status == 1

View File

@@ -4,7 +4,7 @@ import logging
# external imports # external imports
import pytest import pytest
import celery import celery
from chainlib.eth.erc20 import ERC20 from eth_erc20 import ERC20
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import ( from chainlib.eth.tx import (
receipt, receipt,
@@ -13,6 +13,7 @@ from chainlib.eth.tx import (
# local imports # local imports
from cic_eth.queue.tx import register_tx from cic_eth.queue.tx import register_tx
from cic_eth.error import YouAreBrokeError
logg = logging.getLogger() logg = logging.getLogger()
@@ -167,3 +168,101 @@ def test_erc20_approve_task(
r = t.get_leaf() r = t.get_leaf()
logg.debug('result {}'.format(r)) logg.debug('result {}'.format(r))
def test_erc20_transfer_from_task(
default_chain_spec,
foo_token,
agent_roles,
custodial_roles,
eth_signer,
eth_rpc,
init_database,
celery_session_worker,
token_roles,
):
token_object = {
'address': foo_token,
}
transfer_value = 100 * (10 ** 6)
nonce_oracle = RPCNonceOracle(token_roles['FOO_TOKEN_OWNER'], conn=eth_rpc)
c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
(tx_hash, o) = c.approve(foo_token, token_roles['FOO_TOKEN_OWNER'], agent_roles['ALICE'], transfer_value)
r = eth_rpc.do(o)
o = receipt(tx_hash)
r = eth_rpc.do(o)
assert r['status'] == 1
s_nonce = celery.signature(
'cic_eth.eth.nonce.reserve_nonce',
[
[token_object],
default_chain_spec.asdict(),
custodial_roles['FOO_TOKEN_GIFTER'],
],
queue=None,
)
s_transfer = celery.signature(
'cic_eth.eth.erc20.transfer_from',
[
custodial_roles['FOO_TOKEN_GIFTER'],
agent_roles['BOB'],
transfer_value,
default_chain_spec.asdict(),
agent_roles['ALICE'],
],
queue=None,
)
s_nonce.link(s_transfer)
t = s_nonce.apply_async()
r = t.get_leaf()
logg.debug('result {}'.format(r))
def test_erc20_allowance_check_task(
default_chain_spec,
foo_token,
agent_roles,
custodial_roles,
eth_signer,
eth_rpc,
init_database,
celery_session_worker,
token_roles,
):
token_object = {
'address': foo_token,
'symbol': 'FOO',
}
transfer_value = 100 * (10 ** 6)
s_check = celery.signature(
'cic_eth.eth.erc20.check_allowance',
[
[token_object],
custodial_roles['FOO_TOKEN_GIFTER'],
transfer_value,
default_chain_spec.asdict(),
agent_roles['ALICE']
],
queue=None,
)
t = s_check.apply_async()
with pytest.raises(YouAreBrokeError):
t.get()
nonce_oracle = RPCNonceOracle(token_roles['FOO_TOKEN_OWNER'], conn=eth_rpc)
c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
(tx_hash, o) = c.approve(foo_token, token_roles['FOO_TOKEN_OWNER'], agent_roles['ALICE'], transfer_value)
r = eth_rpc.do(o)
o = receipt(tx_hash)
r = eth_rpc.do(o)
assert r['status'] == 1
t = s_check.apply_async()
t.get()
assert t.successful()

View File

@@ -0,0 +1,286 @@
# standard imports
import logging
# external imports
import celery
import pytest
from chainlib.connection import RPCConnection
from chainlib.eth.nonce import (
OverrideNonceOracle,
RPCNonceOracle,
)
from chainlib.eth.gas import (
OverrideGasOracle,
Gas,
)
from chainlib.eth.tx import (
unpack,
TxFormat,
)
from chainlib.eth.constant import (
MINIMUM_FEE_UNITS,
MINIMUM_FEE_PRICE,
)
from chainqueue.sql.tx import create as queue_create
from chainqueue.sql.query import get_tx
from chainqueue.db.enum import StatusBits
from chainqueue.sql.state import (
set_ready,
set_reserved,
set_sent,
)
from chainqueue.db.models.otx import Otx
from hexathon import strip_0x
# local imports
from cic_eth.eth.gas import cache_gas_data
from cic_eth.error import OutOfGasError
logg = logging.getLogger()
def test_task_check_gas_ok(
default_chain_spec,
eth_rpc,
eth_signer,
init_database,
agent_roles,
custodial_roles,
celery_session_worker,
):
rpc = RPCConnection.connect(default_chain_spec, 'default')
nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], conn=eth_rpc)
gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
0,
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
init_database.commit()
s = celery.signature(
'cic_eth.eth.gas.check_gas',
[
[
tx_hash_hex,
],
default_chain_spec.asdict(),
[],
None,
8000000,
],
queue=None
)
t = s.apply_async()
t.get_leaf()
assert t.successful()
init_database.commit()
tx = get_tx(default_chain_spec, tx_hash_hex, session=init_database)
assert tx['status'] & StatusBits.QUEUED == StatusBits.QUEUED
def test_task_check_gas_insufficient(
default_chain_spec,
eth_rpc,
eth_signer,
init_database,
agent_roles,
custodial_roles,
celery_session_worker,
whoever,
):
rpc = RPCConnection.connect(default_chain_spec, 'default')
nonce_oracle = OverrideNonceOracle(whoever, 42)
gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.create(whoever, agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
queue_create(
default_chain_spec,
42,
whoever,
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
init_database.commit()
s = celery.signature(
'cic_eth.eth.gas.check_gas',
[
[
tx_hash_hex,
],
default_chain_spec.asdict(),
[],
None,
None,
],
queue=None
)
t = s.apply_async()
try:
r = t.get_leaf()
except OutOfGasError:
pass
init_database.commit()
tx = get_tx(default_chain_spec, tx_hash_hex, session=init_database)
assert tx['status'] & StatusBits.GAS_ISSUES == StatusBits.GAS_ISSUES
def test_task_check_gas_low(
        default_chain_spec,
        eth_rpc,
        eth_signer,
        init_database,
        agent_roles,
        custodial_roles,
        celery_session_worker,
        whoever,
        ):
    """check_gas must succeed and mark the tx QUEUED when the sender holds
    gas at the minimum fee price and units.

    Bug fixed: the celery.signature args list was closed one element early,
    leaving three trailing task arguments ([], None, None) as stray
    positionals to celery.signature itself; they now sit inside the args
    list, matching the sibling check_gas tests.
    """
    # gift the sender just-enough gas at the minimum fee parameters
    gas_oracle = OverrideGasOracle(price=MINIMUM_FEE_PRICE, limit=MINIMUM_FEE_UNITS)
    nonce_oracle = RPCNonceOracle(custodial_roles['GAS_GIFTER'], conn=eth_rpc)
    c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
    (tx_hash_hex, o) = c.create(custodial_roles['GAS_GIFTER'], whoever, 100 * (10 ** 6))
    eth_rpc.do(o)

    # create and queue the tx whose gas is to be checked
    nonce_oracle = RPCNonceOracle(whoever, conn=eth_rpc)
    c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
    (tx_hash_hex, tx_signed_raw_hex) = c.create(whoever, agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
    queue_create(
            default_chain_spec,
            0,
            whoever,
            tx_hash_hex,
            tx_signed_raw_hex,
            session=init_database,
            )
    cache_gas_data(
            tx_hash_hex,
            tx_signed_raw_hex,
            default_chain_spec.asdict(),
            )
    init_database.commit()

    s = celery.signature(
            'cic_eth.eth.gas.check_gas',
            [
                [
                    tx_hash_hex,
                ],
                default_chain_spec.asdict(),
                [],
                None,
                None,
            ],
            queue=None,
            )
    t = s.apply_async()
    t.get_leaf()
    assert t.successful()
    init_database.commit()

    tx = get_tx(default_chain_spec, tx_hash_hex, session=init_database)
    assert tx['status'] & StatusBits.QUEUED == StatusBits.QUEUED
@pytest.mark.parametrize(
        '_gas_price,_gas_factor',
        [
            (None, 1.1),
            (MINIMUM_FEE_PRICE * 1.1, 0.9),
            (None, 1.3),
        ]
        )
def test_task_resend_explicit(
        default_chain_spec,
        init_database,
        eth_rpc,
        eth_signer,
        agent_roles,
        custodial_roles,
        celery_session_worker,
        _gas_price,
        _gas_factor,
        ):
    """resend_with_higher_gas must produce a replacement tx with a strictly
    higher gas price, whether an explicit price, a bump factor, or both are
    given."""
    rpc = RPCConnection.connect(default_chain_spec, 'default')
    nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], conn=eth_rpc)
    gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)
    c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
    (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
    queue_create(
            default_chain_spec,
            0,
            agent_roles['ALICE'],
            tx_hash_hex,
            tx_signed_raw_hex,
            session=init_database,
            )
    cache_gas_data(
            tx_hash_hex,
            tx_signed_raw_hex,
            default_chain_spec.asdict(),
            )
    # keep the parsed original around to compare gas prices afterwards
    tx_before = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), default_chain_spec)
    init_database.commit()

    # walk the queue state machine up to SENT before attempting a resend
    set_ready(default_chain_spec, tx_hash_hex, session=init_database)
    set_reserved(default_chain_spec, tx_hash_hex, session=init_database)
    set_sent(default_chain_spec, tx_hash_hex, session=init_database)

    s = celery.signature(
            'cic_eth.eth.gas.resend_with_higher_gas',
            [
                tx_hash_hex,
                default_chain_spec.asdict(),
                _gas_price,
                _gas_factor,
            ],
            queue=None
            )
    t = s.apply_async()
    r = t.get_leaf()
    assert t.successful()

    # the task result is the replacement tx hash; it must exist in the local queue
    q = init_database.query(Otx)
    q = q.filter(Otx.tx_hash==strip_0x(r))
    otx = q.first()
    if otx == None:
        raise NotLocalTxError(r)

    tx_after = unpack(bytes.fromhex(strip_0x(otx.signed_tx)), default_chain_spec)
    logg.debug('gasprices before {} after {}'.format(tx_before['gasPrice'], tx_after['gasPrice']))
    assert tx_after['gasPrice'] > tx_before['gasPrice']

View File

@@ -4,16 +4,27 @@ import logging
# external imports # external imports
import pytest import pytest
import celery import celery
from chainlib.eth.gas import Gas from chainlib.eth.gas import (
OverrideGasOracle,
Gas,
)
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import ( from chainlib.eth.tx import (
TxFormat, TxFormat,
unpack, unpack,
transaction, transaction,
receipt, receipt,
raw,
) )
from hexathon import strip_0x from hexathon import strip_0x
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
from chainqueue.sql.tx import create as queue_create
from chainqueue.sql.state import (
set_reserved,
set_ready,
set_sent,
)
from chainqueue.db.enum import StatusBits
# local imports # local imports
from cic_eth.queue.tx import register_tx from cic_eth.queue.tx import register_tx
@@ -60,15 +71,6 @@ def test_tx_send(
def test_sync_tx( def test_sync_tx(
default_chain_spec,
eth_rpc,
eth_signer,
celery_session_worker,
):
pass
def test_resend_with_higher_gas(
init_database, init_database,
default_chain_spec, default_chain_spec,
eth_rpc, eth_rpc,
@@ -77,31 +79,48 @@ def test_resend_with_higher_gas(
celery_session_worker, celery_session_worker,
): ):
nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc) nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], conn=eth_rpc)
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)
(tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 1024, tx_format=TxFormat.RLP_SIGNED) c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict())
tx_before = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), default_chain_spec) queue_create(
default_chain_spec,
42,
agent_roles['ALICE'],
tx_hash_hex,
tx_signed_raw_hex,
session=init_database,
)
cache_gas_data(
tx_hash_hex,
tx_signed_raw_hex,
default_chain_spec.asdict(),
)
set_ready(default_chain_spec, tx_hash_hex, session=init_database)
set_reserved(default_chain_spec, tx_hash_hex, session=init_database)
set_sent(default_chain_spec, tx_hash_hex, session=init_database)
o = raw(tx_signed_raw_hex)
r = eth_rpc.do(o)
o = receipt(tx_hash_hex)
r = eth_rpc.do(o)
assert r['status'] == 1
s = celery.signature( s = celery.signature(
'cic_eth.eth.gas.resend_with_higher_gas', 'cic_eth.eth.tx.sync_tx',
[ [
tx_hash_hex, tx_hash_hex,
default_chain_spec.asdict(), default_chain_spec.asdict(),
], ],
queue=None, queue=None
) )
t = s.apply_async() t = s.apply_async()
r = t.get_leaf() r = t.get_leaf()
assert t.successful()
q = init_database.query(Otx) init_database.commit()
q = q.filter(Otx.tx_hash==strip_0x(r))
otx = q.first()
if otx == None:
raise NotLocalTxError(r)
tx_after = unpack(bytes.fromhex(strip_0x(otx.signed_tx)), default_chain_spec)
logg.debug('gasprices before {} after {}'.format(tx_before['gasPrice'], tx_after['gasPrice']))
assert tx_after['gasPrice'] > tx_before['gasPrice']
o = Otx.load(tx_hash_hex, session=init_database)
assert o.status & StatusBits.FINAL == StatusBits.FINAL

View File

@@ -0,0 +1,170 @@
# standard imports
import os
import logging
# external imports
import pytest
import celery
from chainqueue.sql.tx import create as queue_create
from chainlib.eth.nonce import (
RPCNonceOracle,
OverrideNonceOracle,
)
from chainlib.eth.gas import (
OverrideGasOracle,
Gas,
)
from chainlib.eth.tx import (
TxFormat,
unpack,
receipt,
)
from hexathon import (
add_0x,
strip_0x,
)
from chainqueue.sql.state import (
set_reserved,
set_ready,
)
logg = logging.getLogger()
def test_hashes_to_txs(
        init_database,
        default_chain_spec,
        agent_roles,
        eth_rpc,
        eth_signer,
        celery_session_worker,
        ):
    """hashes_to_txs resolves known tx hashes to their signed raw data and
    silently drops hashes that are not in the local queue."""
    nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42)
    gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)

    # queue two gas transfers from ALICE at consecutive nonces
    queued = []
    for (nonce, recipient, value) in [
            (42, agent_roles['BOB'], 100 * (10 ** 6)),
            (43, agent_roles['CAROL'], 200 * (10 ** 6)),
            ]:
        c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
        (tx_hash, tx_signed_raw) = c.create(agent_roles['ALICE'], recipient, value, tx_format=TxFormat.RLP_SIGNED)
        queue_create(
                default_chain_spec,
                nonce,
                agent_roles['ALICE'],
                tx_hash,
                tx_signed_raw,
                session=init_database,
                )
        queued.append((tx_hash, tx_signed_raw))
    init_database.commit()

    ((tx_hash_one, tx_raw_one), (tx_hash_two, tx_raw_two)) = queued

    # interleave two hashes unknown to the queue with the two real ones
    lookup = [
            add_0x(os.urandom(32).hex()),
            tx_hash_two,
            add_0x(os.urandom(32).hex()),
            tx_hash_one,
            ]
    s = celery.signature(
            'cic_eth.eth.tx.hashes_to_txs',
            [
                lookup,
            ],
            queue=None,
            )
    t = s.apply_async()
    r = t.get_leaf()
    assert t.successful()
    assert len(r) == 2
    logg.debug('r {}'.format(r))

    # exactly the two queued raw txs come back, in any order
    expected = [
            tx_raw_two,
            tx_raw_one,
            ]
    for tx in r:
        expected.remove(add_0x(tx))
    assert len(expected) == 0
def test_double_send(
        init_database,
        default_chain_spec,
        agent_roles,
        eth_rpc,
        eth_signer,
        celery_session_worker,
        ):
    """Two queued txs from the same sender, submitted together through one
    cic_eth.eth.tx.send task call, must both be mined with success status."""
    nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], conn=eth_rpc)
    gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)
    c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
    (tx_hash_hex_one, tx_signed_raw_hex_one) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
    queue_create(
            default_chain_spec,
            42,
            agent_roles['ALICE'],
            tx_hash_hex_one,
            tx_signed_raw_hex_one,
            session=init_database,
            )
    # move the first tx to RESERVED before submission
    set_ready(default_chain_spec, tx_hash_hex_one, session=init_database)
    set_reserved(default_chain_spec, tx_hash_hex_one, session=init_database)
    c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
    (tx_hash_hex_two, tx_signed_raw_hex_two) = c.create(agent_roles['ALICE'], agent_roles['CAROL'], 200 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
    queue_create(
            default_chain_spec,
            43,
            agent_roles['ALICE'],
            tx_hash_hex_two,
            tx_signed_raw_hex_two,
            session=init_database,
            )
    # same state transitions for the second tx
    set_ready(default_chain_spec, tx_hash_hex_two, session=init_database)
    set_reserved(default_chain_spec, tx_hash_hex_two, session=init_database)
    init_database.commit()

    # both signed raw txs are handed to a single send task invocation
    yarrgs = [
            tx_signed_raw_hex_one,
            tx_signed_raw_hex_two,
            ]
    s = celery.signature(
            'cic_eth.eth.tx.send',
            [
                yarrgs,
                default_chain_spec.asdict(),
            ],
            queue=None
            )
    t = s.apply_async()
    r = t.get_leaf()
    assert t.successful()

    # verify on-chain receipts for both txs
    o = receipt(tx_hash_hex_one)
    r = eth_rpc.do(o)
    assert r['status'] == 1
    o = receipt(tx_hash_hex_two)
    r = eth_rpc.do(o)
    assert r['status'] == 1

View File

@@ -3,7 +3,7 @@ from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import ( from chainlib.eth.tx import (
receipt, receipt,
) )
from eth_address_declarator import AddressDeclarator from eth_address_declarator import Declarator
from hexathon import add_0x from hexathon import add_0x
# local imports # local imports
@@ -19,11 +19,12 @@ def test_translate(
agent_roles, agent_roles,
cic_registry, cic_registry,
init_celery_tasks, init_celery_tasks,
register_lookups,
): ):
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc) nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc)
c = AddressDeclarator(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) c = Declarator(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
description = 'alice'.encode('utf-8').ljust(32, b'\x00').hex() description = 'alice'.encode('utf-8').ljust(32, b'\x00').hex()
(tx_hash_hex, o) = c.add_declaration(address_declarator, contract_roles['CONTRACT_DEPLOYER'], agent_roles['ALICE'], add_0x(description)) (tx_hash_hex, o) = c.add_declaration(address_declarator, contract_roles['CONTRACT_DEPLOYER'], agent_roles['ALICE'], add_0x(description))

View File

@@ -8,7 +8,7 @@ from chainlib.eth.tx import (
count, count,
receipt, receipt,
) )
from chainlib.eth.erc20 import ERC20 from eth_erc20 import ERC20
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import RPCNonceOracle
# local imports # local imports

View File

@@ -0,0 +1,22 @@
# local imports
from cic_eth.registry import *
def test_registry_connect(
        eth_rpc,
        default_chain_spec,
        address_declarator,
        token_registry,
        contract_roles,
        purge_lookups,
        registry,
        agent_roles,
        ):
    """Smoke test: connect() wires up the CIC registry, and the declarator
    and token-registry lookups can be attached and then resolved by name."""
    r = connect(eth_rpc, default_chain_spec, registry, sender_address=contract_roles['CONTRACT_DEPLOYER'])
    connect_declarator(eth_rpc, default_chain_spec, [agent_roles['ALICE']], sender_address=contract_roles['CONTRACT_DEPLOYER'])
    r.by_name('AddressDeclarator', sender_address=contract_roles['CONTRACT_DEPLOYER'])
    connect_token_registry(eth_rpc, default_chain_spec, sender_address=contract_roles['CONTRACT_DEPLOYER'])
    r.by_name('TokenRegistry', sender_address=contract_roles['CONTRACT_DEPLOYER'])

View File

@@ -0,0 +1,18 @@
# standard imports
import datetime
# local imports
from cic_eth.stat import init_chain_stat
def test_chain_stat(
        eth_rpc,
        init_eth_tester,
        ):
    """init_chain_stat samples recent blocks and reports the average
    inter-block time; blocks minted two seconds apart must average to 2."""
    base = int(datetime.datetime.now().timestamp()) + 1
    # mint 11 blocks spaced exactly two seconds apart
    for step in range(11):
        init_eth_tester.time_travel(base + (step * 2))
    stat = init_chain_stat(eth_rpc, block_start=0)
    assert stat.block_average() == 2

View File

@@ -3,3 +3,5 @@ dist
dist-web dist-web
dist-server dist-server
scratch scratch
coverage
.nyc_output

View File

@@ -3,17 +3,38 @@
variables: variables:
APP_NAME: cic-meta APP_NAME: cic-meta
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
IMAGE_TAG: $CI_REGISTRY_IMAGE/$APP_NAME:unittest-$CI_COMMIT_SHORT_SHA
.cic_meta_changes_target: .cic_meta_changes_target:
rules: rules:
- changes: - if: $CI_PIPELINE_SOURCE == "merge_request_event"
- $CONTEXT/$APP_NAME/* # - changes:
# - $CONTEXT/$APP_NAME/*
- when: always
build-mr-cic-meta: cic-meta-build-mr:
stage: build
extends:
- .cic_meta_variables
- .cic_meta_changes_target
script:
- mkdir -p /kaniko/.docker
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > "/kaniko/.docker/config.json"
# - /kaniko/executor --context $CONTEXT --dockerfile $DOCKERFILE_PATH $KANIKO_CACHE_ARGS --destination $IMAGE_TAG
- /kaniko/executor --context $CONTEXT --dockerfile $DOCKERFILE_PATH $KANIKO_CACHE_ARGS --destination $IMAGE_TAG
test-mr-cic-meta:
extends: extends:
- .cic_meta_changes_target
- .py_build_merge_request
- .cic_meta_variables - .cic_meta_variables
- .cic_meta_changes_target
stage: test
image: $IMAGE_TAG
script:
- cd /tmp/src/cic-meta
- npm install --dev
- npm run test
- npm run test:coverage
needs: ["cic-meta-build-mr"]
build-push-cic-meta: build-push-cic-meta:
extends: extends:

51
apps/cic-meta/bin/get.js Executable file
View File

@@ -0,0 +1,51 @@
#!/usr/bin/env node

// CLI: fetch a named resource blob from a cic-meta server.
const colors = require('colors');
const {Meta} = require("../dist");

let { argv } = require('yargs')
  .usage('Usage: $0 -m http://localhost:63380 -n publickeys')
  .example(
    '$0 -m http://localhost:63380 -n publickeys',
    'Fetches the public keys blob from the meta server'
  )
  .option('m', {
    alias: 'metaurl',
    describe: 'The URL for the meta service',
    demandOption: 'The meta url is required',
    type: 'string',
    nargs: 1,
  })
  .option('n', {
    alias: 'name',
    describe: 'The name of the resource to be fetched from the meta service',
    demandOption: 'The name of the resource is required',
    type: 'string',
    nargs: 1,
  })
  .option('t', {
    alias: 'type',
    describe: 'The type of resource to be fetched from the meta service\n' +
      'Options: `user`, `phone` and `custom`\n' +
      'Defaults to `custom`',
    type: 'string',
    nargs: 1,
  })
  .epilog('Grassroots Economics (c) 2021')
  .wrap(null);

const metaUrl = argv.m;
const resourceName = argv.n;
let type = argv.t;
if (type === undefined) {
  type = 'custom';
}

(async () => {
  // resolve the resource name to its deterministic storage identifier
  const identifier = await Meta.getIdentifier(resourceName, type);
  console.log(colors.cyan(`Meta server storage identifier: ${identifier}`));
  const metaResponse = await Meta.get(identifier, metaUrl);
  // Meta.get yields an object on success and an error string on failure;
  // previously the error string was still printed in green and the process
  // exited 0 — now report on stderr and exit nonzero.
  if (typeof metaResponse !== "object") {
    console.error(colors.red('Metadata get failed!'));
    console.error(colors.red(metaResponse));
    process.exit(1);
  }
  console.log(colors.green(metaResponse));
})();

81
apps/cic-meta/bin/set.js Executable file
View File

@@ -0,0 +1,81 @@
#!/usr/bin/env node

// CLI: create or update a named resource blob on a cic-meta server,
// signing the change with the supplied PGP private key.
const fs = require("fs");
const colors = require('colors');
const {Meta} = require("../dist");

let { argv } = require('yargs')
  .usage('Usage: $0 -m http://localhost:63380 -k ./privatekeys.asc -n publickeys -r ./publickeys.asc')
  .example(
    '$0 -m http://localhost:63380 -k ./privatekeys.asc -n publickeys -r ./publickeys.asc',
    'Updates the public keys blob to the meta server'
  )
  .option('m', {
    alias: 'metaurl',
    describe: 'The URL for the meta service',
    demandOption: 'The meta url is required',
    type: 'string',
    nargs: 1,
  })
  .option('k', {
    alias: 'privatekey',
    describe: 'The PGP private key blob file used to sign the changes to the meta service',
    demandOption: 'The private key file is required',
    type: 'string',
    nargs: 1,
  })
  .option('n', {
    alias: 'name',
    describe: 'The name of the resource to be set or updated to the meta service',
    demandOption: 'The name of the resource is required',
    type: 'string',
    nargs: 1,
  })
  .option('r', {
    alias: 'resource',
    describe: 'The resource file to be set or updated to the meta service',
    demandOption: 'The resource file is required',
    type: 'string',
    nargs: 1,
  })
  .option('t', {
    alias: 'type',
    describe: 'The type of resource to be set or updated to the meta service\n' +
      'Options: `user`, `phone` and `custom`\n' +
      'Defaults to `custom`',
    type: 'string',
    nargs: 1,
  })
  .epilog('Grassroots Economics (c) 2021')
  .wrap(null);

const metaUrl = argv.m;
const privateKeyFile = argv.k;
const resourceName = argv.n;
const resourceFile = argv.r;
let type = argv.t;
if (type === undefined) {
  type = 'custom';
}

const privateKey = readFile(privateKeyFile);
const resource = readFile(resourceFile);

(async () => {
  if (!(privateKey && resource)) {
    // readFile already reported which input file is missing
    process.exit(1);
  }
  const identifier = await Meta.getIdentifier(resourceName, type);
  console.log(colors.cyan(`Meta server storage identifier: ${identifier}`));
  const meta = new Meta(metaUrl, privateKey);
  // Meta signals readiness via onload once the private key is imported
  meta.onload = async (status) => {
    const response = await meta.set(identifier, resource);
    console.log(colors.green(response));
  };
})();

// Read a file as UTF-8, or report on stderr and return undefined if absent.
function readFile(filename) {
  if (!fs.existsSync(filename)) {
    // bug fix: the message used `$(unknown)` which is not template-literal
    // interpolation and was printed verbatim; interpolate the filename.
    console.error(colors.red(`File ${filename} not found`));
    return;
  }
  return fs.readFileSync(filename, {encoding: 'utf8', flag: 'r'});
}

View File

@@ -4,29 +4,28 @@ WORKDIR /tmp/src/cic-meta
RUN apk add --no-cache postgresql bash RUN apk add --no-cache postgresql bash
COPY cic-meta/package.json \ # required to build the cic-client-meta module
./
COPY cic-meta/src/ src/ COPY cic-meta/src/ src/
COPY cic-meta/tests/ tests/
COPY cic-meta/scripts/ scripts/ COPY cic-meta/scripts/ scripts/
# copy the dependencies
COPY cic-meta/package.json .
COPY cic-meta/tsconfig.json .
COPY cic-meta/webpack.config.js .
RUN npm install RUN npm install
# see exports_dir gpg.ini COPY cic-meta/tests/ tests/
COPY cic-meta/tests/*.asc /root/pgp/ COPY cic-meta/tests/*.asc /root/pgp/
RUN alias tsc=node_modules/typescript/bin/tsc
# copy runtime configs
COPY cic-meta/.config/ /usr/local/etc/cic-meta/ COPY cic-meta/.config/ /usr/local/etc/cic-meta/
# COPY cic-meta/scripts/server/initdb/server.postgres.sql /usr/local/share/cic-meta/sql/server.sql
# db migrations
COPY cic-meta/docker/db.sh ./db.sh COPY cic-meta/docker/db.sh ./db.sh
RUN chmod 755 ./db.sh RUN chmod 755 ./db.sh
#RUN alias ts-node=/tmp/src/cic-meta/node_modules/ts-node/dist/bin.js RUN alias tsc=node_modules/typescript/bin/tsc
#ENTRYPOINT [ "./node_modules/ts-node/dist/bin.js", "./scripts/server/server.ts" ]
COPY cic-meta/docker/start_server.sh ./start_server.sh COPY cic-meta/docker/start_server.sh ./start_server.sh
RUN chmod 755 ./start_server.sh RUN chmod 755 ./start_server.sh
ENTRYPOINT ["sh", "./start_server.sh"] ENTRYPOINT ["sh", "./start_server.sh"]

File diff suppressed because it is too large Load Diff

View File

@@ -1,22 +1,30 @@
{ {
"name": "cic-client-meta", "name": "@cicnet/cic-client-meta",
"version": "0.0.7-alpha.8", "version": "0.0.11",
"description": "Signed CRDT metadata graphs for the CIC network", "description": "Signed CRDT metadata graphs for the CIC network",
"main": "dist/index.js", "main": "dist/index.js",
"types": "dist/index.d.ts", "types": "dist/index.d.ts",
"bin": {
"meta-set": "bin/set.js",
"meta-get": "bin/get.js"
},
"preferGlobal": true,
"scripts": { "scripts": {
"test": "mocha -r node_modules/node-localstorage/register -r ts-node/register tests/*.ts", "test": "mocha -r node_modules/node-localstorage/register -r ts-node/register tests/*.ts",
"test:coverage": "nyc mocha tests/*.ts --timeout 3000 --check-coverage=true",
"build": "node_modules/typescript/bin/tsc -d --outDir dist src/index.ts", "build": "node_modules/typescript/bin/tsc -d --outDir dist src/index.ts",
"build-server": "tsc -d --outDir dist-server scripts/server/*.ts", "build-server": "tsc -d --outDir dist-server scripts/server/*.ts",
"pack": "node_modules/typescript/bin/tsc -d --outDir dist && webpack", "pack": "node_modules/typescript/bin/tsc -d --outDir dist && webpack",
"clean": "rm -rf dist", "clean": "rm -rf dist",
"prepare": "npm run build && npm run build-server", "prepare": "npm run build && npm run build-server",
"start": "./node_modules/ts-node/dist/bin.js ./scripts/server/server.ts" "start": "./node_modules/ts-node/dist/bin.js ./scripts/server/server.ts",
"publish": "npm publish --access public"
}, },
"dependencies": { "dependencies": {
"@cicnet/crdt-meta": "^0.0.10",
"@ethereumjs/tx": "^3.0.0-beta.1", "@ethereumjs/tx": "^3.0.0-beta.1",
"automerge": "^0.14.1", "automerge": "^0.14.1",
"crdt-meta": "0.0.8", "colors": "^1.4.0",
"ethereumjs-wallet": "^1.0.1", "ethereumjs-wallet": "^1.0.1",
"ini": "^1.3.8", "ini": "^1.3.8",
"openpgp": "^4.10.8", "openpgp": "^4.10.8",
@@ -27,7 +35,9 @@
"devDependencies": { "devDependencies": {
"@types/mocha": "^8.0.3", "@types/mocha": "^8.0.3",
"mocha": "^8.2.0", "mocha": "^8.2.0",
"nock": "^13.1.0",
"node-localstorage": "^2.1.6", "node-localstorage": "^2.1.6",
"nyc": "^15.1.0",
"ts-node": "^9.0.0", "ts-node": "^9.0.0",
"typescript": "^4.0.5", "typescript": "^4.0.5",
"webpack": "^5.4.0", "webpack": "^5.4.0",
@@ -43,5 +53,26 @@
"license": "GPL-3.0-or-later", "license": "GPL-3.0-or-later",
"engines": { "engines": {
"node": ">=14.16.1" "node": ">=14.16.1"
},
"nyc": {
"include": [
"src/**/*.ts"
],
"extension": [
".ts"
],
"require": [
"ts-node/register"
],
"reporter": [
"text",
"html"
],
"sourceMap": true,
"instrument": true,
"branches": ">80",
"lines": ">80",
"functions": ">80",
"statements": ">80"
} }
} }

View File

@@ -1,4 +1,4 @@
import { Config } from 'crdt-meta'; import { Config } from '@cicnet/crdt-meta';
const fs = require('fs'); const fs = require('fs');
if (process.argv[2] === undefined) { if (process.argv[2] === undefined) {

View File

@@ -1,7 +1,7 @@
import * as Automerge from 'automerge'; import * as Automerge from 'automerge';
import * as pgp from 'openpgp'; import * as pgp from 'openpgp';
import { Envelope, Syncable } from 'crdt-meta'; import { Envelope, Syncable } from '@cicnet/crdt-meta';
function handleNoMergeGet(db, digest, keystore) { function handleNoMergeGet(db, digest, keystore) {
@@ -31,7 +31,7 @@ function handleNoMergeGet(db, digest, keystore) {
doh(e); doh(e);
}); });
}).catch((e) => { }).catch((e) => {
console.error('mesage', e); console.error('message', e);
doh(e); doh(e);
}); });
}) })
@@ -46,7 +46,7 @@ function handleServerMergePost(data, db, digest, keystore, signer) {
let e = undefined; let e = undefined;
let s = undefined; let s = undefined;
if (v === undefined) { if (v === undefined) {
s = new Syncable(digest, data); s = new Syncable(digest, o);
s.onwrap = (e) => { s.onwrap = (e) => {
whohoo(e.toJSON()); whohoo(e.toJSON());
}; };

View File

@@ -3,7 +3,8 @@ import * as fs from 'fs';
import * as path from 'path'; import * as path from 'path';
import * as handlers from './handlers'; import * as handlers from './handlers';
import { PGPKeyStore, PGPSigner, Config, SqliteAdapter, PostgresAdapter } from 'crdt-meta'; import { PGPKeyStore, PGPSigner, Config } from '@cicnet/crdt-meta';
import { SqliteAdapter, PostgresAdapter } from '../../src/db';
import { standardArgs } from './args'; import { standardArgs } from './args';
@@ -203,7 +204,7 @@ async function processRequest(req, res) {
} }
if (content === undefined) { if (content === undefined) {
console.error('empty onctent', data); console.error('empty content', data);
res.writeHead(400, {"Content-Type": "text/plain"}); res.writeHead(400, {"Content-Type": "text/plain"});
res.end(); res.end();
return; return;

View File

@@ -0,0 +1,27 @@
import {Addressable, mergeKey, Syncable} from "@cicnet/crdt-meta";
// CRDT syncable addressed by an arbitrary name under the ':cic.custom'
// key namespace (or a caller-supplied namespace via toKey).
class Custom extends Syncable implements Addressable {

  name: string
  value: Object

  constructor(name: string, v: Object = {}) {
    super('', v);
    // NOTE(review): key derivation is async, so this.id stays '' until the
    // promise resolves; key() called immediately after construction may
    // return the empty placeholder. Confirm callers tolerate/await this.
    Custom.toKey(name).then((cid) => {
      this.id = cid;
      this.name = name;
    });
  }

  // Derive the deterministic storage key for 'item' within 'identifier'.
  public static async toKey(item: string, identifier: string = ':cic.custom') {
    return await mergeKey(Buffer.from(item), Buffer.from(identifier));
  }

  public key(): string {
    return this.id;
  }
}

export {
  Custom,
}

90
apps/cic-meta/src/db.ts Normal file
View File

@@ -0,0 +1,90 @@
import * as pg from 'pg';
import * as sqlite from 'sqlite3';
// Connection parameters shared by both adapter backends.
type DbConfig = {
  name: string
  host: string
  port: number
  user: string
  password: string
}

// Minimal query/close contract the meta server expects from a database backend.
interface DbAdapter {
  query: (s: string, callback: (e: any, rs: any) => void) => void
  close: () => void
}

// Statement classifiers used by the sqlite adapter to route a SQL string
// to the matching driver call (all/run).
const re_creatematch = /^(CREATE)/i
const re_getmatch = /^(SELECT)/i;
const re_setmatch = /^(INSERT|UPDATE)/i;
// Sqlite-backed implementation of the DbAdapter contract.
class SqliteAdapter implements DbAdapter {

  db: any

  constructor(dbConfig: DbConfig, callback?: (any) => void) {
    this.db = new sqlite.Database(dbConfig.name); //, callback);
  }

  /**
   * Execute a single SQL statement and invoke callback(e, rs).
   * SELECT results are adapted to the pg-like {rowCount, rows} shape so
   * callers can treat both backends uniformly.
   */
  public query(s: string, callback: (e: any, rs?: any) => void): void {
    // adapt sqlite's row array to a pg-style result object
    const local_callback = (e, rs) => {
      let r = undefined;
      if (rs !== undefined) {
        r = {
          rowCount: rs.length,
          rows: rs,
        }
      }
      callback(e, r);
    };
    if (s.match(re_getmatch)) {
      this.db.all(s, local_callback);
    } else if (s.match(re_setmatch)) {
      this.db.run(s, local_callback);
    } else if (s.match(re_creatematch)) {
      // DDL yields no result set; pass the raw callback through
      this.db.run(s, callback);
    } else {
      // bug fix: throw a proper Error instead of a bare string
      throw new Error('unhandled query');
    }
  }

  public close() {
    this.db.close();
  }
}
// Postgres-backed implementation of the DbAdapter contract.
class PostgresAdapter implements DbAdapter {

  db: any

  constructor(dbConfig: DbConfig) {
    const o = dbConfig;
    // pg expects the database name under the 'database' key
    o['database'] = o.name;
    this.db = new pg.Pool(o);
    // Bug fix: the original constructor did `return this.db`, which makes
    // `new PostgresAdapter(...)` evaluate to the raw pg.Pool, bypassing this
    // class's query/close wrappers entirely. The adapter instance is now
    // returned as normal. NOTE(review): confirm no caller relied on
    // receiving the Pool directly.
  }

  public query(s: string, callback: (e: any, rs: any) => void): void {
    this.db.query(s, (e, rs) => {
      if (e) {
        // previously an error path crashed on rs.rowCount with rs undefined
        callback(e, undefined);
        return;
      }
      // mirror the sqlite adapter by exposing a length field on the result
      rs.length = rs.rowCount;
      callback(null, rs);
    });
  }

  public close() {
    this.db.end();
  }
}
// Public surface of this module; DbConfig is a type-only export.
export {
  DbConfig,
  SqliteAdapter,
  PostgresAdapter,
}

View File

@@ -1,2 +1,4 @@
export { User } from './user'; export { User } from './user';
export { Phone } from './phone'; export { Phone } from './phone';
export { Custom } from './custom';
export { Meta } from './meta';

128
apps/cic-meta/src/meta.ts Normal file
View File

@@ -0,0 +1,128 @@
import {ArgPair, Envelope, Syncable, MutablePgpKeyStore, PGPSigner} from "@cicnet/crdt-meta";
import {User} from "./user";
import {Phone} from "./phone";
import {Custom} from "./custom";
const fetch = require("node-fetch");
// Default request headers; 'x-cic-automerge: client' asks the meta server
// for the client-side automerge representation.
const headers = {
  'Content-Type': 'application/json;charset=utf-8',
  'x-cic-automerge': 'client'
};
// Default fetch options for GET requests.
const options = {
  headers: headers,
};
// Client for reading and writing signed CRDT metadata on a cic-meta server.
class Meta {

  // keystore and signer used to sign envelopes before upload
  keystore: MutablePgpKeyStore = new MutablePgpKeyStore();
  signer: PGPSigner = new PGPSigner(this.keystore);
  metaUrl: string;
  private privateKey: string;
  // fired with true once the keyring is loaded and the private key imported
  onload: (status: boolean) => void;

  constructor(metaUrl: string, privateKey: any) {
    this.metaUrl = metaUrl;
    this.privateKey = privateKey;
    // key import is async; callers must wait for onload before calling set()
    this.keystore.loadKeyring().then(() => {
      this.keystore.importPrivateKey(privateKey).then(() => this.onload(true));
    });
  }

  // Create or update the metadata stored under 'identifier'.
  // Always resolves to a human-readable status string.
  async set(identifier: string, data: Object): Promise<any> {
    let syncable: Syncable;
    const response = await Meta.get(identifier, this.metaUrl);
    // NOTE(review): connection failure is detected by string-comparing
    // against the exact sentinel produced by Meta.get — fragile; keep the
    // two messages in sync if either is ever changed.
    if (response === `Request to ${this.metaUrl}/${identifier} failed. Connection error.`) {
      return response;
    } else if (typeof response !== "object" || typeof data !== "object") {
      // nothing usable stored yet (or non-object payload): create fresh
      syncable = new Syncable(identifier, data);
      const res = await this.updateMeta(syncable, identifier);
      return `${res.status}: ${res.statusText}`;
    } else {
      // merge path: re-fetch the current document and apply each property
      // of 'data' as a CRDT update on the 'client-branch'
      syncable = await Meta.get(identifier, this.metaUrl);
      let update: Array<ArgPair> = [];
      for (const prop in data) {
        update.push(new ArgPair(prop, data[prop]));
      }
      syncable.update(update, 'client-branch');
      const res = await this.updateMeta(syncable, identifier);
      return `${res.status}: ${res.statusText}`;
    }
  }

  // Sign the syncable and PUT the wrapped envelope to the meta server.
  // Resolves with {status, statusText} on 2xx, rejects with the same shape otherwise.
  async updateMeta(syncable: Syncable, identifier: string): Promise<any> {
    const envelope: Envelope = await this.wrap(syncable);
    const reqBody: string = envelope.toJSON();
    const putOptions = {
      method: 'PUT',
      headers: headers,
      body: reqBody
    };
    return await fetch(`${this.metaUrl}/${identifier}`, putOptions).then(async response => {
      if (response.ok) {
        return Promise.resolve({
          status: response.status,
          statusText: response.statusText + ', Metadata updated successfully!'
        });
      } else {
        return Promise.reject({
          status: response.status,
          statusText: response.statusText
        });
      }
    });
  }

  // Fetch and unwrap the metadata stored under 'identifier'.
  // Returns the unwrapped document on success, or an error STRING on failure.
  static async get(identifier: string, metaUrl: string): Promise<any> {
    const response = await fetch(`${metaUrl}/${identifier}`, options).then(response => {
      if (response.ok) {
        return (response.json());
      } else {
        return Promise.reject({
          status: response.status,
          statusText: response.statusText
        });
      }
    }).catch(error => {
      if (error.code === 'ECONNREFUSED') {
        return `Request to ${metaUrl}/${identifier} failed. Connection error.`
      }
      return `${error.status}: ${error.statusText}`;
    });
    // a string here means one of the error branches above was taken
    if (typeof response !== "object") {
      return response;
    }
    return Envelope.fromJSON(JSON.stringify(response)).unwrap();
  }

  // Map a resource name and type ('user' | 'phone' | 'custom' | other
  // namespace string) to its deterministic storage identifier.
  static async getIdentifier(name: string, type: string = 'custom'): Promise<string> {
    let identifier: string;
    type = type.toLowerCase();
    if (type === 'user') {
      identifier = await User.toKey(name);
    } else if (type === 'phone') {
      identifier = await Phone.toKey(name);
    } else if (type === 'custom') {
      identifier = await Custom.toKey(name);
    } else {
      // any other type string is treated as a custom key namespace
      identifier = await Custom.toKey(name, type);
    }
    return identifier;
  }

  // Promise wrapper around Syncable's callback-style signing flow.
  wrap(syncable: Syncable): Promise<Envelope> {
    return new Promise<Envelope>(async (resolve, reject) => {
      syncable.setSigner(this.signer);
      syncable.onwrap = async (env) => {
        if (env === undefined) {
          reject();
          return;
        }
        resolve(env);
      };
      syncable.sign();
    });
  }
}

export {
  Meta,
}

View File

@@ -1,4 +1,4 @@
import { Syncable, Addressable, mergeKey } from 'crdt-meta'; import { Syncable, Addressable, mergeKey } from '@cicnet/crdt-meta';
class Phone extends Syncable implements Addressable { class Phone extends Syncable implements Addressable {

View File

@@ -1,4 +1,4 @@
import { Syncable, Addressable, toAddressKey } from 'crdt-meta'; import { Syncable, Addressable, toAddressKey } from '@cicnet/crdt-meta';
const keySalt = new TextEncoder().encode(':cic.person'); const keySalt = new TextEncoder().encode(':cic.person');
class User extends Syncable implements Addressable { class User extends Syncable implements Addressable {

Some files were not shown because too many files have changed in this diff Show More