Compare commits

3 Commits

Author  SHA1        Message                                                                   Date
nolash  b7fbf7614e  Bump version                                                              2021-04-25 14:20:16 +02:00
nolash  f550748efd  Merge remote-tracking branch 'origin/master' into lash/get-registry-api   2021-04-25 14:17:37 +02:00
nolash  5aeadb6770  Add registry lookup tasks, replace direct rpc from admin api tx           2021-04-24 10:35:21 +02:00
344 changed files with 3505 additions and 17149 deletions

.gitignore

@@ -8,10 +8,3 @@ gmon.out
*.egg-info
dist/
build/
**/*sqlite
**/.nyc_output
**/coverage
**/.venv
**/venv
**/dist
.idea


@@ -6,9 +6,8 @@ include:
- local: 'apps/cic-notify/.gitlab-ci.yml'
- local: 'apps/cic-meta/.gitlab-ci.yml'
- local: 'apps/cic-cache/.gitlab-ci.yml'
- local: 'apps/data-seeding/.gitlab-ci.yml'
stages:
- build
- test
- publish
- release


@@ -0,0 +1,34 @@
# The solc image messes up the alpine environment, so we have to go all over again
FROM python:3.8.6-slim-buster
LABEL authors="Louis Holbrook <dev@holbrook.no> 0826EDA1702D1E87C6E2875121D2E7BB88C2A746"
LABEL spdx-license-identifier="GPL-3.0-or-later"
LABEL description="Base layer for building development images for the cic component suite"
RUN apt-get update && \
apt-get install -y git gcc g++ libpq-dev && \
apt-get install -y vim gawk jq telnet openssl iputils-ping curl wget gnupg socat bash procps make python2 postgresql-client
RUN echo installing nodejs tooling
COPY ./dev/nvm.sh /root/
# Install nvm with node and npm
# https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
ENV NVM_DIR /root/.nvm
ENV NODE_VERSION 15.3.0
ENV BANCOR_NODE_VERSION 10.16.0
RUN wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash \
&& . $NVM_DIR/nvm.sh \
&& nvm install $NODE_VERSION \
&& nvm alias default $NODE_VERSION \
&& nvm use $NODE_VERSION \
# So many ridiculously stupid issues with node in docker that take oceans of absolutely wasted time to resolve
# owner of these files is "1001" by default - wtf
&& chown -R root:root "$NVM_DIR/versions/node/v$NODE_VERSION"
ENV NODE_PATH $NVM_DIR/versions/node/v$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node/v$NODE_VERSION/bin:$PATH


@@ -0,0 +1 @@
## this is an example base image if we wanted one for all the other apps. It's just OS-level things


@@ -1,7 +0,0 @@
gmon.out
__pycache__
*.pyc
inc.sh
*.egg-info
build/
dist/


@@ -1,20 +0,0 @@
.cic_base_variables:
variables:
APP_NAME: cic-base
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
.cic_base_changes_target:
rules:
- changes:
- $CONTEXT/$APP_NAME/*
build-mr-cic-base:
extends:
- .cic_base_changes_target
- .py_build_merge_request
- .cic_base_variables
publish_python:
extends:
- .publish_python
- .cic_base_variables


@@ -1 +0,0 @@
include *requirements.txt


@@ -1,8 +0,0 @@
#from . import (
# config,
# argparse,
# rpc,
# signer,
# log,
# version,
# )


@@ -1,87 +0,0 @@
# standard imports
import logging
import argparse
import os
import sys
# external imports
from xdg.BaseDirectory import (
xdg_config_dirs,
load_first_config,
)
logg = logging.getLogger(__file__)
fallback_config_path = '/usr/local/etc'
xdg_config_dirs += [fallback_config_path]
default_config_dir = load_first_config('cic')
if default_config_dir == None:
default_config_dir = os.path.join('.', '.cic')
env_config_dir = os.environ.get('CONFINI_DIR', default_config_dir)
full_template = {
# (long arg and key name, short var, type, default, help,)
'provider': ('p', str, None, 'RPC provider url',),
'registry_address': ('r', str, None, 'CIC registry address',),
'keystore_file': ('y', str, None, 'Keystore file',),
'config_dir': ('c', str, env_config_dir, 'Configuration directory',),
'queue': ('q', str, 'cic-eth', 'Celery task queue',),
'chain_spec': ('i', str, None, 'Chain spec string',),
'env_prefix': (None, str, os.environ.get('CONFINI_ENV_PREFIX'), 'Environment prefix for variables to overwrite configuration',),
}
default_include_args = [
'config_dir',
'provider',
'env_prefix',
]
sub = None
def create(caller_dir, include_args=default_include_args):
argparser = argparse.ArgumentParser()
for k in include_args:
a = full_template[k]
long_flag = '--' + k.replace('_', '-')
short_flag = None
dest = None
if a[0] != None:
short_flag = '-' + a[0]
dest = a[0]
else:
dest = k
default = a[2]
if default == None and k == 'config_dir':
default = os.path.join(os.getcwd(), 'config')
if short_flag == None:
argparser.add_argument(long_flag, dest=dest, type=a[1], default=default, help=a[3])
else:
argparser.add_argument(short_flag, long_flag, dest=dest, type=a[1], default=default, help=a[3])
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
return argparser
def add(argparser, processor, name, description=None):
processor(argparser)
return argparser
def parse(argparser, logger=None):
args = argparser.parse_args(sys.argv[1:])
# handle logging input
if logger != None:
if args.vv:
logger.setLevel(logging.DEBUG)
elif args.v:
logger.setLevel(logging.INFO)
return args


@@ -1,52 +0,0 @@
# standard imports
import logging
# external imports
import confini
# local imports
from .error import ConfigError
logg = logging.getLogger(__name__)
default_arg_overrides = {
'p': 'ETH_PROVIDER',
'i': 'CIC_CHAIN_SPEC',
'r': 'CIC_REGISTRY_ADDRESS',
}
def override(config, override_dict, label):
config.dict_override(override_dict, label)
config.validate()
return config
def create(config_dir, args, env_prefix=None, arg_overrides=default_arg_overrides):
# handle config input
config = None
try:
config = confini.Config(config_dir, env_prefix)
except OSError:
pass
if config == None:
raise ConfigError('directory {} not found'.format(config_dir))
config.process()
if arg_overrides != None and args != None:
override_dict = {}
for k in arg_overrides:
v = getattr(args, k)
if v != None:
override_dict[arg_overrides[k]] = v
config = override(config, override_dict, 'args')
else:
config.validate()
return config
def log(config):
logg.debug('config loaded:\n{}'.format(config))


@@ -1,2 +0,0 @@
class ConfigError(Exception):
pass


@@ -1,13 +0,0 @@
# external imports
from chainlib.connection import RPCConnection
from chainlib.eth.connection import EthUnixSignerConnection
from chainlib.eth.sign import (
sign_transaction,
sign_message,
)
def setup(chain_spec, evm_provider, signer_provider=None):
RPCConnection.register_location(evm_provider, chain_spec, 'default')
if signer_provider != None:
RPCConnection.register_location(signer_provider, chain_spec, 'signer', constructor=EthUnixSignerConnection)


@@ -1,27 +0,0 @@
# standard imports
import logging
import os
# external imports
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore
logg = logging.getLogger(__name__)
keystore = DictKeystore()
default_passphrase = os.environ.get('ETH_PASSPHRASE', '')
def from_keystore(keyfile, passphrase=default_passphrase):
global keystore
# signer
if keyfile == None:
raise ValueError('please specify signer keystore file')
logg.debug('loading keystore file {}'.format(keyfile))
address = keystore.import_keystore_file(keyfile, password=passphrase)
signer = EIP155Signer(keystore)
return (address, signer,)
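
A minimal usage sketch of the keystore helper above; the keyfile path and passphrase are placeholders, and the module's import path is not shown in this diff:

# hypothetical invocation; uses ETH_PASSPHRASE unless a passphrase is given
(address, signer) = from_keystore('/path/to/keyfile.json', passphrase='test')
print('signing as {}'.format(address))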


@@ -1,122 +0,0 @@
# standard imports
import logging
# third-party imports
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import (
StaticPool,
QueuePool,
AssertionPool,
)
logg = logging.getLogger()
Model = declarative_base(name='Model')
class SessionBase(Model):
"""The base object for all SQLAlchemy enabled models. All other models must extend this.
"""
__abstract__ = True
id = Column(Integer, primary_key=True)
engine = None
"""Database connection engine of the running aplication"""
sessionmaker = None
"""Factory object responsible for creating sessions from the connection pool"""
transactional = True
"""Whether the database backend supports query transactions. Should be explicitly set by initialization code"""
poolable = True
"""Whether the database backend supports connection pools. Should be explicitly set by initialization code"""
procedural = True
"""Whether the database backend supports stored procedures"""
localsessions = {}
"""Contains dictionary of sessions initiated by db model components"""
@staticmethod
def create_session():
"""Creates a new database session.
"""
return SessionBase.sessionmaker()
@staticmethod
def _set_engine(engine):
"""Sets the database engine static property
"""
SessionBase.engine = engine
SessionBase.sessionmaker = sessionmaker(bind=SessionBase.engine)
@staticmethod
def connect(dsn, pool_size=8, debug=False):
"""Create new database connection engine and connect to database backend.
:param dsn: DSN string defining connection.
:type dsn: str
"""
e = None
if SessionBase.poolable:
poolclass = QueuePool
if pool_size > 1:
e = create_engine(
dsn,
max_overflow=pool_size*3,
pool_pre_ping=True,
pool_size=pool_size,
pool_recycle=60,
poolclass=poolclass,
echo=debug,
)
else:
if debug:
poolclass = AssertionPool
else:
poolclass = StaticPool
e = create_engine(
dsn,
poolclass=poolclass,
echo=debug,
)
else:
e = create_engine(
dsn,
echo=debug,
)
SessionBase._set_engine(e)
@staticmethod
def disconnect():
"""Disconnect from database and free resources.
"""
SessionBase.engine.dispose()
SessionBase.engine = None
@staticmethod
def bind_session(session=None):
localsession = session
if localsession == None:
localsession = SessionBase.create_session()
localsession_key = str(id(localsession))
logg.debug('creating new session {}'.format(localsession_key))
SessionBase.localsessions[localsession_key] = localsession
return localsession
@staticmethod
def release_session(session=None):
session.flush()
session_key = str(id(session))
if SessionBase.localsessions.get(session_key) != None:
logg.debug('destroying session {}'.format(session_key))
session.commit()
session.close()
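
For orientation, a minimal lifecycle sketch for the SessionBase above, assuming the module is importable; the DSN is a placeholder:

# connect once at startup; sessions are then drawn from the pool
SessionBase.connect('postgresql+psycopg2://postgres@localhost:5432/cic_cache', pool_size=8, debug=False)
session = SessionBase.create_session()
# ... queries against models extending SessionBase ...
session.commit()
session.close()
SessionBase.disconnect()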


@@ -1,43 +0,0 @@
# standard imports
import os
import time
import logging
# third-party imports
import semver
version = (
0,
1,
2,
'beta.22',
)
version_object = semver.VersionInfo(
major=version[0],
minor=version[1],
patch=version[2],
prerelease=version[3],
)
def git_hash():
import subprocess
git_diff = subprocess.run(['git', 'diff'], capture_output=True)
git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
git_hash_brief = git_hash.stdout.decode('utf-8')[:8]
return git_hash_brief
version_string = str(version_object)
try:
version_git = git_hash()
version_string += '+build.{}'.format(version_git)
except FileNotFoundError:
time_string_pair = str(time.time()).split('.')
version_string += '+build.{}{:<09d}'.format(
time_string_pair[0],
int(time_string_pair[1]),
)
__version_string__ = version_string
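
To illustrate what the construction above yields, a short sketch; the git hash suffix is a hypothetical placeholder:

import semver

v = semver.VersionInfo(major=0, minor=1, patch=2, prerelease='beta.22')
print(str(v))                       # 0.1.2-beta.22
print(str(v) + '+build.deadbeef')   # 0.1.2-beta.22+build.deadbeef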


@@ -1,26 +0,0 @@
FROM python:3.8.6-slim-buster
RUN apt-get update && \
apt-get install -y git gcc g++ libpq-dev && \
apt-get install -y vim gawk jq telnet openssl iputils-ping curl wget gnupg socat bash procps make python2 postgresql-client cargo
WORKDIR /usr/src/cic-base
COPY . /usr/src/cic-base/
#RUN mkdir python
#WORKDIR ./python
#COPY ./pep503.sh .
#RUN pip download --no-cache-dir --extra-index-url https://pip.grassrootseconomics.net:8433 cic-base[full_graph]==0.1.1a6
RUN pip install -r requirements.txt
RUN python setup.py bdist_wheel
RUN pip download --extra-index-url https://pip.grassrootseconomics.net:8433 dist/$(basename $(ls dist/*))
RUN mkdir packages && \
cd packages && \
bash ../docker/pep503.sh ..
WORKDIR /usr/src/cic-base/packages
RUN ls
RUN ls ..
ENTRYPOINT ["python", "-m", "http.server", "8080"]


@@ -1,22 +0,0 @@
#!/usr/bin/env bash
dest=`pwd`
d=$1
for df in `find $d -name "*.whl" -type f`; do
f=`basename $df`
pd=`echo $f | sed -e "s/^\(.*\)-[[:digit:]]*\.[[:digit:]].*$/\1/g" | tr "[:upper:]" "[:lower:]" | tr "_" "-"`
mkdir -v $dest/$pd
mv -v $df $dest/$pd/
done
for df in `find $d -name "*.tar.gz" -type f`; do
f=`basename $df`
pd=`echo $f | sed -e "s/^\(.*\)-[[:digit:]]*\.[[:digit:]].*$/\1/g" | tr "[:upper:]" "[:lower:]" | tr "_" "-"`
mkdir -v $dest/$pd
mv -v $df $dest/$pd/
done
for df in `find $d -name "*.zip" -type f`; do
f=`basename $df`
pd=`echo $f | sed -e "s/^\(.*\)-[[:digit:]]*\.[[:digit:]].*$/\1/g" | tr "[:upper:]" "[:lower:]" | tr "_" "-"`
mkdir -v $dest/$pd
mv -v $df $dest/$pd/
done


@@ -1,20 +0,0 @@
import logging
import os
import cic_base.argparse
import cic_base.config
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
def more_argparse(argparser):
argparser.add_argument('--foo', type=str, help='foo')
script_dir = os.path.realpath(os.path.dirname(__file__))
argparser = cic_base.argparse.create(script_dir, include_args=cic_base.argparse.full_template)
args = cic_base.argparse.parse(argparser, logger=logg)
config = cic_base.config.create(args.c, args, env_prefix=args.env_prefix)
cic_base.config.log(config)


@@ -1,3 +0,0 @@
[foo]
bar = 42
baz = xyzzy


@@ -1,98 +0,0 @@
africastalking==1.2.3
alembic==1.4.2
amqp==2.6.1
attrs==20.3.0
base58==2.1.0
bcrypt==3.2.0
billiard==3.6.3.0
bip-utils==1.4.0
bitarray==1.2.2
blake2b-py==0.1.4
cached-property==1.5.2
celery==4.4.7
certifi==2020.12.5
cffi==1.14.3
chainlib==0.0.1a18
chainsyncer==0.0.1a18
chardet==3.0.4
confini==0.3.6rc3
contextlib2==0.6.0.post1
coverage==5.4
cryptography==3.2.1
cytoolz==0.11.0
ecdsa==0.16.1
ecuth==0.4.5a1
Faker==4.17.1
hexathon==0.0.1a3
hexbytes==0.2.1
http-hoba-auth==0.2.0
idna==2.10
iniconfig==1.1.1
ipfshttpclient==0.6.1
json-rpc==1.13.0
jsonschema==3.2.0
kombu==4.6.11
lru-dict==1.1.7
Mako==1.1.3
MarkupSafe==1.1.1
mirakuru==2.3.0
moolb==0.1.1b2
more-itertools==8.7.0
multiaddr==0.0.9
mypy-extensions==0.4.3
netaddr==0.8.0
packaging==20.9
parsimonious==0.8.1
phonenumbers==8.12.12
pluggy==0.13.1
port-for==0.4
protobuf==3.15.1
psutil==5.8.0
psycopg2==2.8.6
py==1.9.0
py-ecc==4.1.0
py-eth==0.1.1
pycparser==2.20
pycryptodome==3.10.1
pyethash==0.1.27
pyparsing==2.4.7
pyrsistent==0.17.3
pysha3==1.0.2
pytest==6.0.1
pytest-alembic==0.2.5
pytest-celery==0.0.0a1
pytest-cov==2.10.1
pytest-mock==3.3.1
pytest-redis==2.0.0
python-dateutil==2.8.1
python-editor==1.0.4
python-gnupg==0.4.6
python-i18n==0.3.9
pytz==2021.1
PyYAML==5.3.1
redis==3.5.3
requests==2.24.0
rlp==2.0.1
schema==0.7.4
semantic-version==2.8.5
semver==2.13.0
six==1.15.0
sortedcontainers==2.3.0
SQLAlchemy==1.3.20
sqlparse==0.4.1
text-unidecode==1.3
tinydb==4.2.0
toml==0.10.2
toolz==0.11.1
transitions==0.8.4
trie==2.0.0a5
typing-extensions==3.7.4.3
urllib3==1.25.11
uWSGI==2.0.19.1
varint==1.0.2
vine==1.3.0
vobject==0.9.6.1
web3==5.12.2
websocket-client==0.57.0
websockets==8.1
yaml-acl==0.0.1


@@ -1,44 +0,0 @@
africastalking==1.2.3
alembic==1.4.2
bcrypt==3.2.0
celery==4.4.7
confini==0.3.6rc3
crypto-dev-signer==0.4.14b4
cryptography==3.2.1
ecuth==0.4.5a5
eth-accounts-index==0.0.11a14
eth-address-index==0.1.1a12
eth-contract-registry==0.5.5a3
erc20-transfer-authorization==0.3.1a7
erc20-faucet==0.2.1a5
faker==4.17.1
http-hoba-auth==0.2.1a2
moolb==0.1.1b2
phonenumbers==8.12.12
psycopg2==2.8.6
py-eth~=0.1.1
pytest==6.0.1
pytest-alembic==0.2.5
pytest-celery==0.0.0a1
pytest-cov==2.10.1
pytest-mock==3.3.1
pytest-redis==2.0.0
python-i18n==0.3.9
PyYAML==5.3.1
redis==3.5.3
requests==2.24.0
semver==2.13.0
SQLAlchemy==1.3.20
sqlparse==0.4.1
tinydb==4.2.0
transitions==0.8.4
uWSGI==2.0.19.1
vobject==0.9.6.1
web3==5.12.2
websockets==8.1
yaml-acl==0.0.1
rlp==2.0.1
cryptocurrency-cli-tools==0.0.5
websocket-client==0.57.0
hexathon==0.0.1a7
chainsyncer~=0.0.2a5


@@ -1,40 +0,0 @@
confini==0.3.6rc3
crypto-dev-signer==0.4.14b1
semver==2.13.0
SQLAlchemy==1.3.20
pyxdg==0.27
chainlib==0.0.2a10
alembic==1.4.2
celery==4.4.7
cryptography==3.2.1
ecuth==0.4.5a1
eth-accounts-index==0.0.11a8
eth-address-index==0.1.1a8
eth-contract-registry==0.5.4a9
erc20-transfer-authorization==0.3.1a4
erc20-single-shot-faucet==0.2.0a11
faker==4.17.1
http-hoba-auth==0.2.0
moolb==0.1.1b2
phonenumbers==8.12.12
psycopg2==2.8.6
python-i18n==0.3.9
PyYAML==5.3.1
redis==3.5.3
requests==2.24.0
sqlparse==0.4.1
transitions==0.8.4
uWSGI==2.0.19.1
vobject==0.9.6.1
web3==5.12.2
websockets==8.1
yaml-acl==0.0.1
rlp==2.0.1
cryptocurrency-cli-tools==0.0.4
giftable-erc20-token==0.0.8a8
websocket-client==0.57.0
hexathon==0.0.1a7
chainsyncer==0.0.2b1
sarafu-faucet==0.0.2a20
cic-types==0.1.0a10
cic-eth-registry==0.5.4a13


@@ -1,9 +0,0 @@
confini==0.3.6rc3
crypto-dev-signer==0.4.14b4
semver==2.13.0
SQLAlchemy==1.3.20
pyxdg==0.27
chainlib==0.0.3rc3
eth-erc20==0.0.9a4
liveness==0.0.1a7
requirements-magic~=0.0.1a2


@@ -1,35 +0,0 @@
[metadata]
name = cic-base
version = attr: cic_base.version.__version_string__
description = CIC python base
author = Louis Holbrook
author_email = dev@holbrook.no
url = https://gitlab.com/grassrootseconomics/cic-eth
keywords =
cic
cryptocurrency
ethereum
classifiers =
Programming Language :: Python :: 3
Operating System :: OS Independent
Development Status :: 3 - Alpha
Environment :: No Input/Output (Daemon)
Intended Audience :: Developers
License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
Topic :: Internet
# Topic :: Blockchain :: EVM
license = GPL3
licence_files =
LICENSE.txt
[options]
python_requires = >= 3.6
packages =
cic_base
[options.entry_points]
console_scripts =
cic-base-audit = cic_base.runnable.audit:main
cic-base-merge = cic_base.runnable.merge:main
cic-base-update = cic_base.runnable.update:main


@@ -1,50 +0,0 @@
from setuptools import setup
import configparser
import os
import logging
import re
logg = logging.getLogger(__name__)
re_v = r'[~><=]='
def merge(requirements_files, base_dir='.'):
requirements = {}
for r in requirements_files:
filepath = os.path.join(base_dir, r)
logg.debug('reading {}'.format(filepath))
f = open(filepath, 'r')
while True:
l = f.readline()
if l == '':
break
l = l.rstrip()
m = re.split(re_v, l)
k = m[0]
if k == None:
raise ValueError('invalid requirement line {}'.format(l))
if requirements.get(k) == None:
logg.info('adding {} -> {}'.format(k, l))
requirements[k] = l
else:
logg.debug('skipping {}'.format(l))
f.close()
return list(requirements.values())
requirements = []
f = open('requirements.txt', 'r')
while True:
l = f.readline()
if l == '':
break
requirements.append(l.rstrip())
f.close()
setup(
install_requires=requirements,
)
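
A short sketch of how merge() above resolves duplicate pins; file names and contents are hypothetical. The first file listed wins for any package that appears more than once:

# base.txt contains:  foo==1.0
# extra.txt contains: foo~=1.1 and bar==2.0
reqs = merge(['base.txt', 'extra.txt'], base_dir='.')
# reqs -> ['foo==1.0', 'bar==2.0']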


@@ -1,22 +0,0 @@
# standard imports
import logging
import unittest
# external imports
from chainlib.chain import ChainSpec
# local imports
from cic_base.rpc import setup as rpc_setup
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()
class TestBase(unittest.TestCase):
def setUp(self):
self.chain_spec = ChainSpec('evm', 'foo', 42)
rpc_setup(self.chain_spec, 'http://localhost:8545', signer_provider='ipc://tmp/foo')
def tearDown(self):
pass


@@ -1,14 +0,0 @@
# standard imports
import unittest
# local imports
from tests.base import TestBase
class TestBasic(TestBase):
def test_basic(self):
pass
if __name__ == '__main__':
unittest.main()


@@ -6,4 +6,3 @@ HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=


@@ -6,4 +6,3 @@ HOST=localhost
PORT=5432
ENGINE=sqlite
DRIVER=pysqlite
DEBUG=


@@ -1,28 +1,22 @@
# standard imports
import logging
import datetime
# external imports
# third-party imports
import moolb
# local imports
from cic_cache.db.list import (
list_transactions_mined,
list_transactions_account_mined,
list_transactions_mined_with_data,
)
from cic_cache.db import list_transactions_mined
from cic_cache.db import list_transactions_account_mined
logg = logging.getLogger()
class Cache:
class BloomCache:
def __init__(self, session):
self.session = session
class BloomCache(Cache):
@staticmethod
def __get_filter_size(n):
n = 8192 * 8
@@ -93,44 +87,3 @@ class BloomCache(Cache):
f_blocktx.add(block + tx)
logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
class DataCache(Cache):
def load_transactions_with_data(self, offset, end):
rows = list_transactions_mined_with_data(self.session, offset, end)
tx_cache = []
highest_block = -1;
lowest_block = -1;
date_is_str = None # stick this in startup
for r in rows:
if highest_block == -1:
highest_block = r['block_number']
lowest_block = r['block_number']
tx_type = 'unknown'
if r['value'] != None:
tx_type = '{}.{}'.format(r['domain'], r['value'])
if date_is_str == None:
date_is_str = type(r['date_block']).__name__ == 'str'
o = {
'block_number': r['block_number'],
'tx_hash': r['tx_hash'],
'date_block': r['date_block'],
'sender': r['sender'],
'recipient': r['recipient'],
'from_value': int(r['from_value']),
'to_value': int(r['to_value']),
'source_token': r['source_token'],
'destination_token': r['destination_token'],
'success': r['success'],
'tx_type': tx_type,
}
if date_is_str:
o['date_block'] = datetime.datetime.fromisoformat(r['date_block'])
tx_cache.append(o)
return (lowest_block, highest_block, tx_cache)
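
For reference, a sketch of how the two cache classes above are consumed, mirroring the daemons and tests elsewhere in this diff; block bounds are placeholders:

c = BloomCache(session)
(lowest, highest, block_filter, blocktx_filter) = c.load_transactions(0, 100)

c = DataCache(session)
(lowest, highest, tx_cache) = c.load_transactions_with_data(410000, 420000)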


@@ -2,14 +2,9 @@
import logging
# local imports
from .list import (
list_transactions_mined,
list_transactions_account_mined,
add_transaction,
tag_transaction,
add_tag,
)
from .list import list_transactions_mined
from .list import list_transactions_account_mined
from .list import add_transaction
logg = logging.getLogger()


@@ -2,9 +2,8 @@
import logging
import datetime
# external imports
# third-party imports
from cic_cache.db.models.base import SessionBase
from sqlalchemy import text
logg = logging.getLogger()
@@ -28,26 +27,6 @@ def list_transactions_mined(
return r
def list_transactions_mined_with_data(
session,
offset,
end,
):
"""Executes db query to return all confirmed transactions according to the specified offset and limit.
:param offset: Offset in data set to return transactions from
:type offset: int
:param limit: Max number of transactions to retrieve
:type limit: int
:result: Result set
:rtype: SQLAlchemy.ResultProxy
"""
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number ASC, tx_index ASC".format(offset, end)
r = session.execute(s)
return r
def list_transactions_account_mined(
session,
address,
@@ -71,8 +50,7 @@ def list_transactions_account_mined(
def add_transaction(
session,
tx_hash,
session, tx_hash,
block_number,
tx_index,
sender,
@@ -84,33 +62,6 @@ def add_transaction(
success,
timestamp,
):
"""Adds a single transaction to the cache persistent storage. Sensible interpretation of all fields is the responsibility of the caller.
:param session: Persistent storage session object
:type session: SQLAlchemy session
:param tx_hash: Transaction hash
:type tx_hash: str, 0x-hex
:param block_number: Block number
:type block_number: int
:param tx_index: Transaction index in block
:type tx_index: int
:param sender: Ethereum address of effective sender
:type sender: str, 0x-hex
:param receiver: Ethereum address of effective recipient
:type receiver: str, 0x-hex
:param source_token: Ethereum address of token used by sender
:type source_token: str, 0x-hex
:param destination_token: Ethereum address of token received by recipient
:type destination_token: str, 0x-hex
:param from_value: Source token value spent in transaction
:type from_value: int
:param to_value: Destination token value received in transaction
:type to_value: int
:param success: True if code execution on network was successful
:type success: bool
:param timestamp: Block timestamp (unix epoch seconds)
:type timestamp: int
"""
date_block = datetime.datetime.fromtimestamp(timestamp)
s = "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, source_token, destination_token, from_value, to_value, success, date_block) VALUES ('{}', {}, {}, '{}', '{}', '{}', '{}', {}, {}, {}, '{}')".format(
tx_hash,
@@ -126,74 +77,3 @@ def add_transaction(
date_block,
)
session.execute(s)
def tag_transaction(
session,
tx_hash,
name,
domain=None,
):
"""Tag a single transaction with a single tag.
Tag must already exist in storage.
:param session: Persistent storage session object
:type session: SQLAlchemy session
:param tx_hash: Transaction hash
:type tx_hash: str, 0x-hex
:param name: Tag value
:type name: str
:param domain: Tag domain
:type domain: str
:raises ValueError: Unknown tag or transaction hash
"""
s = text("SELECT id from tx where tx_hash = :a")
r = session.execute(s, {'a': tx_hash}).fetchall()
tx_id = r[0].values()[0]
if tx_id == None:
raise ValueError('unknown tx hash {}'.format(tx_hash))
#s = text("SELECT id from tag where value = :a and domain = :b")
if domain == None:
s = text("SELECT id from tag where value = :a")
else:
s = text("SELECT id from tag where value = :a and domain = :b")
r = session.execute(s, {'a': name, 'b': domain}).fetchall()
tag_id = r[0].values()[0]
logg.debug('type {} {}'.format(type(tag_id), type(tx_id)))
if tag_id == None:
raise ValueError('unknown tag name {} domain {}'.format(name, domain))
s = text("INSERT INTO tag_tx_link (tag_id, tx_id) VALUES (:a, :b)")
r = session.execute(s, {'a': int(tag_id), 'b': int(tx_id)})
def add_tag(
session,
name,
domain=None,
):
"""Add a single tag to storage.
:param session: Persistent storage session object
:type session: SQLAlchemy session
:param name: Tag value
:type name: str
:param domain: Tag domain
:type domain: str
:raises sqlalchemy.exc.IntegrityError: Tag already exists
"""
s = None
if domain == None:
s = text("INSERT INTO tag (value) VALUES (:b)")
else:
s = text("INSERT INTO tag (domain, value) VALUES (:a, :b)")
session.execute(s, {'a': domain, 'b': name})
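
A minimal usage sketch of the tagging helpers above, assuming an open SQLAlchemy session and the hash tx_hash of a transaction already present in the cache:

add_tag(session, 'transfer', domain='erc20')
session.commit()
tag_transaction(session, tx_hash, 'transfer', domain='erc20')
session.commit()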


@@ -1,38 +0,0 @@
"""Transaction tags
Revision ID: aaf2bdce7d6e
Revises: 6604de4203e2
Create Date: 2021-05-01 09:20:20.775082
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'aaf2bdce7d6e'
down_revision = '6604de4203e2'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'tag',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('domain', sa.String(), nullable=True),
sa.Column('value', sa.String(), nullable=False),
)
op.create_index('idx_tag_domain_value', 'tag', ['domain', 'value'], unique=True)
op.create_table(
'tag_tx_link',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('tag_id', sa.Integer, sa.ForeignKey('tag.id'), nullable=False),
sa.Column('tx_id', sa.Integer, sa.ForeignKey('tx.id'), nullable=False),
)
def downgrade():
op.drop_table('tag_tx_link')
op.drop_index('idx_tag_domain_value')
op.drop_table('tag')


@@ -1,2 +1 @@
from .erc20 import *
from .faucet import *


@@ -1,27 +1,2 @@
class TagSyncFilter:
"""Holds tag name and domain for an implementing filter.
:param name: Tag value
:type name: str
:param domain: Tag domain
:type domain: str
"""
def __init__(self, name, domain=None):
self.tag_name = name
self.tag_domain = domain
def tag(self):
"""Return tag value/domain.
:rtype: Tuple
:returns: tag value/domain.
"""
return (self.tag_name, self.tag_domain)
def __str__(self):
if self.tag_domain == None:
return self.tag_name
return '{}.{}'.format(self.tag_domain, self.tag_name)
class SyncFilter:
pass


@@ -2,6 +2,7 @@
import logging
# external imports
from chainlib.eth.erc20 import ERC20
from chainlib.eth.address import (
to_checksum_address,
)
@@ -12,19 +13,17 @@ from cic_eth_registry.error import (
NotAContractError,
ContractMismatchError,
)
from eth_erc20 import ERC20
# local imports
from .base import TagSyncFilter
from .base import SyncFilter
from cic_cache import db as cic_cache_db
logg = logging.getLogger().getChild(__name__)
class ERC20TransferFilter(TagSyncFilter):
class ERC20TransferFilter(SyncFilter):
def __init__(self, chain_spec):
super(ERC20TransferFilter, self).__init__('transfer', domain='erc20')
self.chain_spec = chain_spec
@@ -47,9 +46,6 @@ class ERC20TransferFilter(TagSyncFilter):
except RequestMismatchException:
logg.debug('erc20 match but not a transfer, skipping')
return False
except ValueError:
logg.debug('erc20 match but bogus data, skipping')
return False
token_sender = tx.outputs[0]
token_recipient = transfer_data[0]
@@ -71,13 +67,7 @@ class ERC20TransferFilter(TagSyncFilter):
tx.status == Status.SUCCESS,
block.timestamp,
)
db_session.flush()
cic_cache_db.tag_transaction(
db_session,
tx.hash,
self.tag_name,
domain=self.tag_domain,
)
#db_session.flush()
db_session.commit()
return True


@@ -1,73 +0,0 @@
# standard imports
import logging
# external imports
from erc20_faucet import Faucet
from chainlib.eth.address import to_checksum_address
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.status import Status
from hexathon import strip_0x
# local imports
import cic_cache.db as cic_cache_db
from .base import TagSyncFilter
#logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger()
class FaucetFilter(TagSyncFilter):
def __init__(self, chain_spec, sender_address=ZERO_ADDRESS):
super(FaucetFilter, self).__init__('give_to', domain='faucet')
self.chain_spec = chain_spec
self.sender_address = sender_address
def filter(self, conn, block, tx, db_session=None):
try:
data = strip_0x(tx.payload)
except ValueError:
return False
logg.debug('data {}'.format(data))
if Faucet.method_for(data[:8]) == None:
return False
token_sender = tx.inputs[0]
token_recipient = data[64+8-40:]
logg.debug('token recipient {}'.format(token_recipient))
f = Faucet(self.chain_spec)
o = f.token(token_sender, sender_address=self.sender_address)
r = conn.do(o)
token = f.parse_token(r)
f = Faucet(self.chain_spec)
o = f.token_amount(token_sender, sender_address=self.sender_address)
r = conn.do(o)
token_value = f.parse_token_amount(r)
cic_cache_db.add_transaction(
db_session,
tx.hash,
block.number,
tx.index,
to_checksum_address(token_sender),
to_checksum_address(token_recipient),
token,
token,
token_value,
token_value,
tx.status == Status.SUCCESS,
block.timestamp,
)
db_session.flush()
cic_cache_db.tag_transaction(
db_session,
tx.hash,
self.tag_name,
domain=self.tag_domain,
)
db_session.commit()
return True


@@ -1,110 +0,0 @@
# standard imports
import logging
import json
import re
import base64
# local imports
from cic_cache.cache import (
BloomCache,
DataCache,
)
logg = logging.getLogger(__name__)
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'
re_transactions_all_data = r'/txa/(\d+)/(\d+)/?'
DEFAULT_LIMIT = 100
def process_transactions_account_bloom(session, env):
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
if not r:
return None
address = r[1]
if r[2] == None:
address = '0x' + address
offset = DEFAULT_LIMIT
if r.lastindex > 2:
offset = r[3]
limit = 0
if r.lastindex > 3:
limit = r[4]
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
'filter_rounds': 3,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
def process_transactions_all_bloom(session, env):
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
if not r:
return None
offset = DEFAULT_LIMIT
if r.lastindex > 0:
offset = r[1]
limit = 0
if r.lastindex > 1:
limit = r[2]
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
'filter_rounds': 3,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
def process_transactions_all_data(session, env):
r = re.match(re_transactions_all_data, env.get('PATH_INFO'))
if not r:
return None
if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
return None
offset = r[1]
end = r[2]
if int(r[2]) < int(r[1]):
raise ValueError('cart before the horse, dude')
c = DataCache(session)
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, end)
for r in tx_cache:
r['date_block'] = r['date_block'].timestamp()
o = {
'low': lowest_block,
'high': highest_block,
'data': tx_cache,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
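
These handlers are driven straight from the WSGI environment; a sketch of exercising the data endpoint, mirroring the test later in this diff (session is an open, seeded database session):

env = {
    'PATH_INFO': '/txa/410000/420000',
    'HTTP_X_CIC_CACHE_MODE': 'all',
}
r = process_transactions_all_data(session, env)
if r != None:
    (mime_type, content) = r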


@@ -1,20 +1,18 @@
# standard imports
import os
import re
import logging
import argparse
import json
import base64
# external imports
# third-party imports
import confini
# local imports
from cic_cache import BloomCache
from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase
from cic_cache.runnable.daemons.query import (
process_transactions_account_bloom,
process_transactions_all_bloom,
process_transactions_all_data,
)
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
@@ -46,6 +44,72 @@ logg.debug('config:\n{}'.format(config))
dsn = dsn_from_config(config)
SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'
DEFAULT_LIMIT = 100
def process_transactions_account_bloom(session, env):
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
if not r:
return None
address = r[1]
if r[2] == None:
address = '0x' + address
offset = DEFAULT_LIMIT
if r.lastindex > 2:
offset = r[3]
limit = 0
if r.lastindex > 3:
limit = r[4]
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
'filter_rounds': 3,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
def process_transactions_all_bloom(session, env):
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
if not r:
return None
offset = DEFAULT_LIMIT
if r.lastindex > 0:
offset = r[1]
limit = 0
if r.lastindex > 1:
limit = r[2]
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
'filter_rounds': 3,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
# uwsgi application
def application(env, start_response):
@@ -55,16 +119,10 @@ def application(env, start_response):
session = SessionBase.create_session()
for handler in [
process_transactions_all_data,
process_transactions_all_bloom,
process_transactions_account_bloom,
]:
r = None
try:
r = handler(session, env)
except ValueError as e:
start_response('400 {}'.format(str(e)))
return []
r = handler(session, env)
if r != None:
(mime_type, content) = r
break


@@ -7,16 +7,14 @@ import argparse
import sys
import re
# external imports
# third-party imports
import confini
import celery
import sqlalchemy
import rlp
import cic_base.config
import cic_base.log
import cic_base.argparse
import cic_base.rpc
from cic_base.eth.syncer import chain_interface
from cic_eth_registry import CICRegistry
from cic_eth_registry.error import UnknownContractError
from chainlib.chain import ChainSpec
@@ -29,37 +27,26 @@ from hexathon import (
strip_0x,
)
from chainsyncer.backend.sql import SQLBackend
from chainsyncer.driver.head import HeadSyncer
from chainsyncer.driver.history import HistorySyncer
from chainsyncer.driver import (
HeadSyncer,
HistorySyncer,
)
from chainsyncer.db.models.base import SessionBase
# local imports
from cic_cache.db import (
dsn_from_config,
add_tag,
)
from cic_cache.db import dsn_from_config
from cic_cache.runnable.daemons.filters import (
ERC20TransferFilter,
FaucetFilter,
)
script_dir = os.path.realpath(os.path.dirname(__file__))
def add_block_args(argparser):
argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync')
argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
return argparser
logg = cic_base.log.create()
argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
#argparser = cic_base.argparse.add(argparser, add_traffic_args, 'traffic')
args = cic_base.argparse.parse(argparser, logg)
config = cic_base.config.create(args.c, args, args.env_prefix)
config.add(args.history_start, 'SYNCER_HISTORY_START', True)
config.add(args.no_history, '_NO_HISTORY', True)
cic_base.config.log(config)
dsn = dsn_from_config(config)
@@ -68,21 +55,10 @@ SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
#RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
def register_filter_tags(filters, session):
for f in filters:
tag = f.tag()
try:
add_tag(session, tag[0], domain=tag[1])
session.commit()
logg.info('added tag name "{}" domain "{}"'.format(tag[0], tag[1]))
except sqlalchemy.exc.IntegrityError:
session.rollback()
logg.debug('already have tag name "{}" domain "{}"'.format(tag[0], tag[1]))
def main():
# Connect to blockchain with chainlib
rpc = RPCConnection.connect(chain_spec, 'default')
@@ -91,7 +67,7 @@ def main():
r = rpc.do(o)
block_offset = int(strip_0x(r), 16) + 1
logg.debug('current block height {}'.format(block_offset))
logg.debug('starting at block {}'.format(block_offset))
syncers = []
@@ -100,22 +76,17 @@ def main():
syncer_backends = SQLBackend.resume(chain_spec, block_offset)
if len(syncer_backends) == 0:
initial_block_start = config.get('SYNCER_HISTORY_START')
initial_block_offset = block_offset
if config.get('_NO_HISTORY'):
initial_block_start = block_offset
initial_block_offset += 1
syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
logg.info('found no backends to resume, adding initial sync from history start {} end {}'.format(initial_block_start, initial_block_offset))
logg.info('found no backends to resume')
syncer_backends.append(SQLBackend.initial(chain_spec, block_offset))
else:
for syncer_backend in syncer_backends:
logg.info('resuming sync session {}'.format(syncer_backend))
for syncer_backend in syncer_backends:
syncers.append(HistorySyncer(syncer_backend, chain_interface))
syncers.append(HistorySyncer(syncer_backend))
syncer_backend = SQLBackend.live(chain_spec, block_offset+1)
syncers.append(HeadSyncer(syncer_backend, chain_interface))
syncers.append(HeadSyncer(syncer_backend))
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
if trusted_addresses_src == None:
@@ -126,22 +97,11 @@ def main():
logg.info('using trusted address {}'.format(address))
erc20_transfer_filter = ERC20TransferFilter(chain_spec)
faucet_filter = FaucetFilter(chain_spec)
filters = [
erc20_transfer_filter,
faucet_filter,
]
session = SessionBase.create_session()
register_filter_tags(filters, session)
session.close()
i = 0
for syncer in syncers:
logg.debug('running syncer index {}'.format(i))
for f in filters:
syncer.add_filter(f)
syncer.add_filter(erc20_transfer_filter)
r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc)
sys.stderr.write("sync {} done at block {}\n".format(syncer, r))


@@ -6,4 +6,4 @@ HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0
DEBUG=


@@ -1,2 +1,2 @@
[eth]
provider = http://localhost:63545
provider = ws://localhost:63546


@@ -1,3 +1,2 @@
[syncer]
loop_interval = 1
history_start = 0


@@ -1,3 +1,2 @@
[syncer]
loop_interval = 5
history_start = 0


@@ -1,4 +1,2 @@
[cic]
registry_address =
chain_spec =
trust_address =


@@ -6,4 +6,4 @@ HOST=localhost
PORT=5432
ENGINE=sqlite
DRIVER=pysqlite
DEBUG=1
DEBUG=


@@ -1,2 +0,0 @@
[syncer]
loop_interval = 1


@@ -17,7 +17,7 @@ RUN apt-get update && \
# Copy shared requirements from top of mono-repo
RUN echo "copying root req file ${root_requirement_file}"
RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a76
COPY cic-cache/requirements.txt ./
COPY cic-cache/setup.cfg \
@@ -43,6 +43,10 @@ COPY cic-cache/config/ /usr/local/etc/cic-cache/
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
COPY cic-cache/cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
mkdir -p /usr/local/share/cic/solidity && \
cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
COPY cic-cache/docker/start_tracker.sh ./start_tracker.sh
COPY cic-cache/docker/db.sh ./db.sh
RUN chmod 755 ./*.sh


@@ -1,14 +1,12 @@
cic-base==0.1.3a3+build.984b5cff
cic-base~=0.1.2a77
alembic==1.4.2
confini~=0.3.6rc3
uwsgi==2.0.19.1
moolb~=0.1.0
cic-eth-registry~=0.5.6a1
cic-eth-registry~=0.5.4a16
SQLAlchemy==1.3.20
semver==2.13.0
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
rlp==2.0.1
chainsyncer[sql]~=0.0.3a3
erc20-faucet~=0.2.2a1
chainsyncer[sql]~=0.0.2a2


@@ -2,7 +2,6 @@
import os
import argparse
import logging
import re
import alembic
from alembic.config import Config as AlembicConfig
@@ -24,8 +23,6 @@ argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
argparser.add_argument('-f', action='store_true', help='force action')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()
@@ -56,10 +53,4 @@ ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', dsn)
ac.set_main_option('script_location', migrations_dir)
if args.reset:
if not args.f:
if not re.match(r'[yY][eE]?[sS]?', input('EEK! this will DELETE the existing db. are you sure??')):
logg.error('user chickened out on requested reset, bailing')
sys.exit(1)
alembic.command.downgrade(ac, 'base')
alembic.command.upgrade(ac, 'head')


@@ -4,7 +4,3 @@ pytest-mock==3.3.1
pysqlite3==0.4.3
sqlparse==0.4.1
pytest-celery==0.0.0a1
eth_tester==0.5.0b3
py-evm==0.3.0a20
cic_base[full]==0.1.3a3+build.984b5cff
sarafu-faucet~=0.0.4a1


@@ -3,7 +3,7 @@ import os
import sys
import datetime
# external imports
# third-party imports
import pytest
# local imports
@@ -84,20 +84,3 @@ def txs(
session.commit()
return [
tx_hash_first,
tx_hash_second,
]
@pytest.fixture(scope='function')
def tag_txs(
init_database,
txs,
):
db.add_tag(init_database, 'taag', domain='test')
init_database.commit()
db.tag_transaction(init_database, txs[1], 'taag', domain='test')


@@ -1,3 +0,0 @@
from chainlib.eth.pytest import *
from cic_eth_registry.pytest.fixtures_tokens import *


@@ -1,69 +0,0 @@
# standard imports
import os
import datetime
import logging
import json
# external imports
import pytest
from sqlalchemy import text
from chainlib.eth.tx import Tx
from chainlib.eth.block import Block
from chainlib.chain import ChainSpec
from hexathon import (
strip_0x,
add_0x,
)
# local imports
from cic_cache.db import add_tag
from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
logg = logging.getLogger()
def test_erc20_filter(
eth_rpc,
foo_token,
init_database,
list_defaults,
list_actors,
tags,
):
chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
fltr = ERC20TransferFilter(chain_spec)
add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
data = 'a9059cbb'
data += strip_0x(list_actors['alice'])
data += '1000'.ljust(64, '0')
block = Block({
'hash': os.urandom(32).hex(),
'number': 42,
'timestamp': datetime.datetime.utcnow().timestamp(),
'transactions': [],
})
tx = Tx({
'to': foo_token,
'from': list_actors['bob'],
'data': data,
'value': 0,
'hash': os.urandom(32).hex(),
'nonce': 13,
'gasPrice': 10000000,
'gas': 123456,
})
block.txs.append(tx)
tx.block = block
r = fltr.filter(eth_rpc, block, tx, db_session=init_database)
assert r
s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
assert r[0] == tx.hash


@@ -1,71 +0,0 @@
# standard imports
import logging
# external imports
from chainlib.chain import ChainSpec
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.block import (
block_by_hash,
Block,
)
from chainlib.eth.tx import (
receipt,
unpack,
transaction,
Tx,
)
from hexathon import strip_0x
from erc20_faucet.faucet import SingleShotFaucet
from sqlalchemy import text
# local imports
from cic_cache.db import add_tag
from cic_cache.runnable.daemons.filters.faucet import FaucetFilter
logg = logging.getLogger()
def test_filter_faucet(
eth_rpc,
eth_signer,
foo_token,
faucet_noregistry,
init_database,
list_defaults,
contract_roles,
agent_roles,
tags,
):
chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
fltr = FaucetFilter(chain_spec, contract_roles['CONTRACT_DEPLOYER'])
add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc)
c = SingleShotFaucet(chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
(tx_hash_hex, o) = c.give_to(faucet_noregistry, agent_roles['ALICE'], agent_roles['ALICE'])
r = eth_rpc.do(o)
tx_src = unpack(bytes.fromhex(strip_0x(o['params'][0])), chain_spec)
o = receipt(r)
r = eth_rpc.do(o)
rcpt = Tx.src_normalize(r)
assert r['status'] == 1
o = block_by_hash(r['block_hash'])
r = eth_rpc.do(o)
block_object = Block(r)
tx = Tx(tx_src, block_object)
tx.apply_receipt(rcpt)
r = fltr.filter(eth_rpc, block_object, tx, init_database)
assert r
s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
assert r[0] == tx.hash


@@ -2,7 +2,7 @@
import os
import logging
# external imports
# third-party imports
import pytest
import confini
@@ -13,7 +13,7 @@ logg = logging.getLogger(__file__)
@pytest.fixture(scope='session')
def load_config():
config_dir = os.path.join(root_dir, 'config/test')
config_dir = os.path.join(root_dir, '.config/test')
conf = confini.Config(config_dir, 'CICTEST')
conf.process()
logg.debug('config {}'.format(conf))


@@ -3,16 +3,13 @@ import os
import logging
import re
# external imports
# third-party imports
import pytest
import sqlparse
import alembic
from alembic.config import Config as AlembicConfig
# local imports
from cic_cache.db.models.base import SessionBase
from cic_cache.db import dsn_from_config
from cic_cache.db import add_tag
logg = logging.getLogger(__file__)
@@ -29,10 +26,11 @@ def database_engine(
except FileNotFoundError:
pass
dsn = dsn_from_config(load_config)
SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
SessionBase.connect(dsn)
return dsn
# TODO: use alembic instead to migrate db; as it is, we have to keep a schema here separate from the migration script in script/migrate.py
@pytest.fixture(scope='function')
def init_database(
load_config,
@@ -40,23 +38,52 @@ def init_database(
):
rootdir = os.path.dirname(os.path.dirname(__file__))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrationsdir):
migrationsdir = os.path.join(dbdir, 'migrations', 'default')
logg.info('using migrations directory {}'.format(migrationsdir))
schemadir = os.path.join(rootdir, 'db', load_config.get('DATABASE_DRIVER'))
if load_config.get('DATABASE_ENGINE') == 'sqlite':
rconn = SessionBase.engine.raw_connection()
f = open(os.path.join(schemadir, 'db.sql'))
s = f.read()
f.close()
rconn.executescript(s)
else:
rconn = SessionBase.engine.raw_connection()
rcursor = rconn.cursor()
#rcursor.execute('DROP FUNCTION IF EXISTS public.transaction_list')
#rcursor.execute('DROP FUNCTION IF EXISTS public.balances')
f = open(os.path.join(schemadir, 'db.sql'))
s = f.read()
f.close()
r = re.compile(r'^[A-Z]', re.MULTILINE)
for l in sqlparse.parse(s):
strl = str(l)
# we need to check for empty query lines, as sqlparse doesn't do that on its own (and psycopg complains when it gets them)
if not re.search(r, strl):
logg.warning('skipping parsed query line {}'.format(strl))
continue
rcursor.execute(strl)
rconn.commit()
rcursor.execute('SET search_path TO public')
# this doesn't work when run separately, no idea why
# functions have been manually added to original schema from cic-eth
# f = open(os.path.join(schemadir, 'proc_transaction_list.sql'))
# s = f.read()
# f.close()
# rcursor.execute(s)
#
# f = open(os.path.join(schemadir, 'proc_balances.sql'))
# s = f.read()
# f.close()
# rcursor.execute(s)
rcursor.close()
session = SessionBase.create_session()
ac = AlembicConfig(os.path.join(migrationsdir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', database_engine)
ac.set_main_option('script_location', migrationsdir)
alembic.command.downgrade(ac, 'base')
alembic.command.upgrade(ac, 'head')
session.commit()
yield session
session.commit()
session.close()
@@ -89,14 +116,3 @@ def list_defaults(
return {
'block': 420000,
}
@pytest.fixture(scope='function')
def tags(
init_database,
):
add_tag(init_database, 'foo')
add_tag(init_database, 'baz', domain='bar')
add_tag(init_database, 'xyzzy', domain='bar')
init_database.commit()


@@ -1,31 +0,0 @@
# standard imports
import json
# external imports
import pytest
# local imports
from cic_cache.runnable.daemons.query import process_transactions_all_data
def test_api_all_data(
init_database,
txs,
):
env = {
'PATH_INFO': '/txa/410000/420000',
'HTTP_X_CIC_CACHE_MODE': 'all',
}
j = process_transactions_all_data(init_database, env)
o = json.loads(j[1])
assert len(o['data']) == 2
env = {
'PATH_INFO': '/txa/420000/410000',
'HTTP_X_CIC_CACHE_MODE': 'all',
}
with pytest.raises(ValueError):
j = process_transactions_all_data(init_database, env)


@@ -4,12 +4,11 @@ import datetime
import logging
import json
# external imports
# third-party imports
import pytest
# local imports
from cic_cache import BloomCache
from cic_cache.cache import DataCache
logg = logging.getLogger()
@@ -34,23 +33,3 @@ def test_cache(
assert b[0] == list_defaults['block'] - 1
def test_cache_data(
init_database,
list_defaults,
list_actors,
list_tokens,
txs,
tag_txs,
):
session = init_database
c = DataCache(session)
b = c.load_transactions_with_data(410000, 420000)
assert len(b[2]) == 2
assert b[2][0]['tx_hash'] == txs[1]
assert b[2][1]['tx_type'] == 'unknown'
assert b[2][0]['tx_type'] == 'test.taag'


@@ -1,37 +0,0 @@
import os
import datetime
import logging
import json
# external imports
import pytest
# local imports
from cic_cache.db import tag_transaction
logg = logging.getLogger()
def test_cache(
init_database,
list_defaults,
list_actors,
list_tokens,
txs,
tags,
):
tag_transaction(init_database, txs[0], 'foo')
tag_transaction(init_database, txs[0], 'baz', domain='bar')
tag_transaction(init_database, txs[1], 'xyzzy', domain='bar')
r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.value = 'foo'").fetchall()
assert r[0][0] == txs[0]
r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'baz'").fetchall()
assert r[0][0] == txs[0]
r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'xyzzy'").fetchall()
assert r[0][0] == txs[1]


@@ -5,5 +5,3 @@ omit =
cic_eth/db/migrations/*
cic_eth/sync/head.py
cic_eth/sync/mempool.py
cic_eth/queue/state.py
*redis*.py


@@ -5,29 +5,18 @@
.cic_eth_changes_target:
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
#changes:
#- $CONTEXT/$APP_NAME/**/*
when: always
- changes:
- $CONTEXT/$APP_NAME/*
build-mr-cic-eth:
extends:
- .cic_eth_variables
- .cic_eth_changes_target
- .py_build_target_test
test-mr-cic-eth:
extends:
- .py_build_merge_request
- .cic_eth_variables
- .cic_eth_changes_target
stage: test
image: $CI_REGISTRY_IMAGE/$APP_NAME-test:latest
script:
- cd apps/$APP_NAME/
- pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
needs: ["build-mr-cic-eth"]
build-push-cic-eth:
extends:
- .py_build_push
- .cic_eth_variables


@@ -1,2 +0,0 @@
include *requirements.txt


@@ -4,18 +4,11 @@ import logging
# external imports
import celery
from chainlib.chain import ChainSpec
from chainlib.connection import RPCConnection
from chainlib.eth.tx import (
unpack,
TxFactory,
)
from chainlib.eth.gas import OverrideGasOracle
from chainqueue.sql.query import get_tx
from chainqueue.sql.state import set_cancel
from chainlib.eth.tx import unpack
from chainqueue.query import get_tx
from chainqueue.state import set_cancel
from chainqueue.db.models.otx import Otx
from chainqueue.db.models.tx import TxCache
from hexathon import strip_0x
from potaahto.symbols import snake_and_camel
# local imports
from cic_eth.db.models.base import SessionBase
@@ -28,14 +21,13 @@ from cic_eth.admin.ctrl import (
)
from cic_eth.queue.tx import queue_create
from cic_eth.eth.gas import create_check_gas_task
from cic_eth.task import BaseTask
celery_app = celery.current_app
logg = logging.getLogger()
@celery_app.task(bind=True, base=BaseTask)
def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
@celery_app.task(bind=True)
def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1):
"""Shift all transactions with nonces higher than the offset by the provided position delta.
Transactions who are replaced by transactions that move nonces will be marked as OVERRIDDEN.
@@ -46,29 +38,25 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
:type tx_hash_orig_hex: str, 0x-hex
:param delta: Amount
"""
chain_spec = ChainSpec.from_dict(chainspec_dict)
rpc = RPCConnection.connect(chain_spec, 'default')
rpc_signer = RPCConnection.connect(chain_spec, 'signer')
queue = None
try:
queue = self.request.delivery_info.get('routing_key')
except AttributeError:
pass
session = BaseTask.session_func()
tx_brief = get_tx(chain_spec, tx_hash_orig_hex, session=session)
tx_raw = bytes.fromhex(strip_0x(tx_brief['signed_tx']))
chain_spec = ChainSpec.from_chain_str(chain_str)
tx_brief = get_tx(tx_hash_orig_hex)
tx_raw = bytes.fromhex(strip_0x(tx_brief['signed_tx'][2:]))
tx = unpack(tx_raw, chain_spec)
nonce = tx_brief['nonce']
address = tx['from']
logg.debug('shifting nonce {} position(s) for address {}, offset {}, hash {}'.format(delta, address, nonce, tx['hash']))
logg.debug('shifting nonce {} position(s) for address {}, offset {}'.format(delta, address, nonce))
lock_queue(None, chain_spec.asdict(), address=address)
lock_send(None, chain_spec.asdict(), address=address)
set_cancel(chain_spec, strip_0x(tx['hash']), manual=True, session=session)
lock_queue(None, chain_str, address)
lock_send(None, chain_str, address)
session = SessionBase.create_session()
q = session.query(Otx)
q = q.join(TxCache)
q = q.filter(TxCache.sender==address)
@@ -81,57 +69,49 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
for otx in otxs:
tx_raw = bytes.fromhex(strip_0x(otx.signed_tx))
tx_new = unpack(tx_raw, chain_spec)
tx_new = snake_and_camel(tx_new)
tx_previous_hash_hex = tx_new['hash']
tx_previous_nonce = tx_new['nonce']
tx_new['gas_price'] += 1
tx_new['gasPrice'] = tx_new['gas_price']
tx_new['nonce'] -= delta
logg.debug('tx_new {}'.format(tx_new))
del(tx_new['hash'])
del(tx_new['hash_unsigned'])
del(tx_new['hashUnsigned'])
tx_new['nonce'] -= delta
gas_oracle = OverrideGasOracle(limit=tx_new['gas'], price=tx_new['gas_price'] + 1) # TODO: it should be possible to merely set the price here, and have chainlib.eth.tx fill it into the existing struct when missing
c = TxFactory(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = c.build_raw(tx_new)
(tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx_new, chain_str)
logg.debug('tx {} -> {} nonce {} -> {}'.format(tx_previous_hash_hex, tx_hash_hex, tx_previous_nonce, tx_new['nonce']))
otx = Otx(
tx_new['nonce'],
tx_hash_hex,
tx_signed_raw_hex,
)
nonce=tx_new['nonce'],
address=tx_new['from'],
tx_hash=tx_hash_hex,
signed_tx=tx_signed_raw_hex,
)
session.add(otx)
session.commit()
# TODO: cancel all first, then replace. Otherwise we risk two non-locked states for two different nonces.
set_cancel(chain_spec, strip_0x(tx_previous_hash_hex), manual=True, session=session)
set_cancel(tx_previous_hash_hex, True)
TxCache.clone(tx_previous_hash_hex, tx_hash_hex, session=session)
TxCache.clone(tx_previous_hash_hex, tx_hash_hex)
tx_hashes.append(tx_hash_hex)
txs.append(tx_signed_raw_hex)
session.commit()
session.close()
s = create_check_gas_task(
s = create_check_gas_and_send_task(
txs,
chain_spec,
chain_str,
tx_new['from'],
gas=tx_new['gas'],
tx_hashes_hex=tx_hashes,
queue=queue,
tx_new['gas'],
tx_hashes,
queue,
)
s_unlock_send = celery.signature(
'cic_eth.admin.ctrl.unlock_send',
[
chain_spec.asdict(),
chain_str,
tx_new['from'],
],
queue=queue,
@@ -139,7 +119,7 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
s_unlock_direct = celery.signature(
'cic_eth.admin.ctrl.unlock_queue',
[
chain_spec.asdict(),
chain_str,
tx_new['from'],
],
queue=queue,
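A condensed sketch of the re-signing step this hunk reworks, following the new chainlib-based path above; chain_spec and rpc_signer stand in for the connections the task sets up, and the gas bump and hash-field deletion mirror the hunk:

    from chainlib.eth.tx import unpack, TxFactory
    from chainlib.eth.gas import OverrideGasOracle
    from potaahto.symbols import snake_and_camel
    from hexathon import strip_0x

    def reshift(signed_tx_hex, chain_spec, rpc_signer, delta=1):
        # decode the queued raw transaction and normalize key casing
        tx = snake_and_camel(unpack(bytes.fromhex(strip_0x(signed_tx_hex)), chain_spec))
        tx['gas_price'] += 1                  # bump the fee so the replacement is accepted
        tx['gasPrice'] = tx['gas_price']
        tx['nonce'] -= delta                  # move the nonce down by the shift delta
        for k in ('hash', 'hash_unsigned', 'hashUnsigned'):
            del tx[k]                         # stale hashes must not be carried into the new tx
        gas_oracle = OverrideGasOracle(limit=tx['gas'], price=tx['gas_price'])
        factory = TxFactory(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle)
        return factory.build_raw(tx)          # -> (tx_hash_hex, tx_signed_raw_hex)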

View File

@@ -16,6 +16,4 @@ def default_token(self):
return {
'symbol': self.default_token_symbol,
'address': self.default_token_address,
'name': self.default_token_name,
'decimals': self.default_token_decimals,
}

View File

@@ -8,7 +8,6 @@ from chainlib.eth.constant import (
ZERO_ADDRESS,
)
from cic_eth_registry import CICRegistry
from cic_eth_registry.erc20 import ERC20Token
from cic_eth_registry.error import UnknownContractError
from chainlib.eth.address import to_checksum_address
from chainlib.eth.contract import code
@@ -31,14 +30,13 @@ from chainqueue.db.enum import (
status_str,
)
from chainqueue.error import TxStateChangeError
from chainqueue.sql.query import get_tx
from eth_erc20 import ERC20
# local imports
from cic_eth.db.models.base import SessionBase
from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.nonce import Nonce
from cic_eth.error import InitializationError
from cic_eth.queue.query import get_tx
app = celery.current_app
@@ -190,7 +188,6 @@ class AdminApi:
s_manual = celery.signature(
'cic_eth.queue.state.set_manual',
[
chain_spec.asdict(),
tx_hash_hex,
],
queue=self.queue,
@@ -209,9 +206,8 @@ class AdminApi:
s.link(s_gas)
return s_manual.apply_async()
def check_nonce(self, chain_spec, address):
def check_nonce(self, address):
s = celery.signature(
'cic_eth.queue.query.get_account_tx',
[
@@ -232,12 +228,13 @@ class AdminApi:
s_get_tx = celery.signature(
'cic_eth.queue.query.get_tx',
[
chain_spec.asdict(),
chain_spec.asdict(),
k,
],
queue=self.queue,
)
tx = s_get_tx.apply_async().get()
#tx = get_tx(k)
logg.debug('checking nonce {} (previous {})'.format(tx['nonce'], last_nonce))
nonce_otx = tx['nonce']
if not is_alive(tx['status']) and tx['status'] & local_fail > 0:
@@ -245,9 +242,7 @@ class AdminApi:
blocking_tx = k
blocking_nonce = nonce_otx
elif nonce_otx - last_nonce > 1:
logg.debug('tx {}'.format(tx))
tx_obj = unpack(bytes.fromhex(strip_0x(tx['signed_tx'])), chain_spec)
logg.error('nonce gap; {} followed {} for account {}'.format(nonce_otx, last_nonce, tx_obj['from']))
logg.error('nonce gap; {} followed {} for account {}'.format(nonce_otx, last_nonce, tx['from']))
blocking_tx = k
blocking_nonce = nonce_otx
break
@@ -261,13 +256,12 @@ class AdminApi:
'blocking': blocking_nonce,
},
'tx': {
'blocking': add_0x(blocking_tx),
'blocking': blocking_tx,
}
}
}
# TODO: this is risky, since it does not validate that there actually is a nonce problem
def fix_nonce(self, chain_spec, address, nonce):
def fix_nonce(self, address, nonce, chain_spec):
s = celery.signature(
'cic_eth.queue.query.get_account_tx',
[
@@ -281,17 +275,15 @@ class AdminApi:
txs = s.apply_async().get()
tx_hash_hex = None
session = SessionBase.create_session()
for k in txs.keys():
tx_dict = get_tx(chain_spec, k, session=session)
tx_dict = get_tx(k)
if tx_dict['nonce'] == nonce:
tx_hash_hex = k
session.close()
s_nonce = celery.signature(
'cic_eth.admin.nonce.shift_nonce',
[
chain_spec.asdict(),
self.rpc.chain_spec.asdict(),
tx_hash_hex,
],
queue=self.queue
@@ -396,13 +388,12 @@ class AdminApi:
t = s.apply_async()
tx = t.get()
source_token = None
if tx['source_token'] != ZERO_ADDRESS:
source_token_declaration = None
if registry != None:
try:
source_token_declaration = registry.by_address(tx['source_token'], sender_address=self.call_address)
source_token = registry.by_address(tx['source_token'])
except UnknownContractError:
logg.warning('unknown source token contract {} (direct)'.format(tx['source_token']))
else:
@@ -415,21 +406,16 @@ class AdminApi:
queue=self.queue
)
t = s.apply_async()
source_token_declaration = t.get()
if source_token_declaration != None:
logg.warning('found declarator record for source token {} but not checking validity'.format(tx['source_token']))
source_token = ERC20Token(chain_spec, self.rpc, tx['source_token'])
logg.debug('source token setup {}'.format(source_token))
source_token = t.get()
if source_token == None:
logg.warning('unknown source token contract {} (task pool)'.format(tx['source_token']))
destination_token = None
if tx['destination_token'] != ZERO_ADDRESS:
destination_token_declaration = None
if registry != None:
try:
destination_token_declaration = registry.by_address(tx['destination_token'], sender_address=self.call_address)
destination_token = registry.by_address(tx['destination_token'])
except UnknownContractError:
logg.warning('unknown destination token contract {}'.format(tx['destination_token']))
else:
@@ -442,10 +428,10 @@ class AdminApi:
queue=self.queue
)
t = s.apply_async()
destination_token_declaration = t.get()
if destination_token_declaration != None:
logg.warning('found declarator record for destination token {} but not checking validity'.format(tx['destination_token']))
destination_token = ERC20Token(chain_spec, self.rpc, tx['destination_token'])
destination_token = t.get()
if destination_token == None:
logg.warning('unknown destination token contract {} (task pool)'.format(tx['destination_token']))
tx['sender_description'] = 'Custodial account'
tx['recipient_description'] = 'Custodial account'
@@ -557,19 +543,13 @@ class AdminApi:
if role != None:
tx['recipient_description'] = role
erc20_c = ERC20(chain_spec)
if source_token != None:
tx['source_token_symbol'] = source_token.symbol
o = erc20_c.balance_of(tx['source_token'], tx['sender'], sender_address=self.call_address)
r = self.rpc.do(o)
tx['sender_token_balance'] = erc20_c.parse_balance(r)
tx['source_token_symbol'] = source_token.symbol()
tx['sender_token_balance'] = source_token.function('balanceOf')(tx['sender']).call()
if destination_token != None:
tx['destination_token_symbol'] = destination_token.symbol
o = erc20_c.balance_of(tx['destination_token'], tx['recipient'], sender_address=self.call_address)
r = self.rpc.do(o)
tx['recipient_token_balance'] = erc20_c.parse_balance(r)
#tx['recipient_token_balance'] = destination_token.function('balanceOf')(tx['recipient']).call()
tx['destination_token_symbol'] = destination_token.symbol()
tx['recipient_token_balance'] = source_token.function('balanceOf')(tx['recipient']).call()
# TODO: this can mean either not submitted or culled; need to check other txs with the same nonce to determine which
tx['network_status'] = 'Not in node'

View File

@@ -74,156 +74,29 @@ class Api:
return s_token.apply_async()
# def convert_transfer(self, from_address, to_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
# """Executes a chain of celery tasks that performs conversion between two ERC20 tokens, and transfers to a specified recipient after convert has completed.
#
# :param from_address: Ethereum address of sender
# :type from_address: str, 0x-hex
# :param to_address: Ethereum address of recipient
# :type to_address: str, 0x-hex
# :param target_return: Estimated return from conversion
# :type target_return: int
# :param minimum_return: The least value of destination token return to allow
# :type minimum_return: int
# :param from_token_symbol: ERC20 token symbol of token being converted
# :type from_token_symbol: str
# :param to_token_symbol: ERC20 token symbol of token to receive
# :type to_token_symbol: str
# :returns: uuid of root task
# :rtype: celery.Task
# """
# raise NotImplementedError('out of service until new DEX migration is done')
# s_check = celery.signature(
# 'cic_eth.admin.ctrl.check_lock',
# [
# [from_token_symbol, to_token_symbol],
# self.chain_spec.asdict(),
# LockEnum.QUEUE,
# from_address,
# ],
# queue=self.queue,
# )
# s_nonce = celery.signature(
# 'cic_eth.eth.nonce.reserve_nonce',
# [
# self.chain_spec.asdict(),
# ],
# queue=self.queue,
# )
# s_tokens = celery.signature(
# 'cic_eth.eth.erc20.resolve_tokens_by_symbol',
# [
# self.chain_str,
# ],
# queue=self.queue,
# )
# s_convert = celery.signature(
# 'cic_eth.eth.bancor.convert_with_default_reserve',
# [
# from_address,
# target_return,
# minimum_return,
# to_address,
# self.chain_spec.asdict(),
# ],
# queue=self.queue,
# )
# s_nonce.link(s_tokens)
# s_check.link(s_nonce)
# if self.callback_param != None:
# s_convert.link(self.callback_success)
# s_tokens.link(s_convert).on_error(self.callback_error)
# else:
# s_tokens.link(s_convert)
#
# t = s_check.apply_async(queue=self.queue)
# return t
#
#
# def convert(self, from_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
# """Executes a chain of celery tasks that performs conversion between two ERC20 tokens.
#
# :param from_address: Ethereum address of sender
# :type from_address: str, 0x-hex
# :param target_return: Estimated return from conversion
# :type target_return: int
# :param minimum_return: The least value of destination token return to allow
# :type minimum_return: int
# :param from_token_symbol: ERC20 token symbol of token being converted
# :type from_token_symbol: str
# :param to_token_symbol: ERC20 token symbol of token to receive
# :type to_token_symbol: str
# :returns: uuid of root task
# :rtype: celery.Task
# """
# raise NotImplementedError('out of service until new DEX migration is done')
# s_check = celery.signature(
# 'cic_eth.admin.ctrl.check_lock',
# [
# [from_token_symbol, to_token_symbol],
# self.chain_spec.asdict(),
# LockEnum.QUEUE,
# from_address,
# ],
# queue=self.queue,
# )
# s_nonce = celery.signature(
# 'cic_eth.eth.nonce.reserve_nonce',
# [
# self.chain_spec.asdict(),
# ],
# queue=self.queue,
# )
# s_tokens = celery.signature(
# 'cic_eth.eth.erc20.resolve_tokens_by_symbol',
# [
# self.chain_spec.asdict(),
# ],
# queue=self.queue,
# )
# s_convert = celery.signature(
# 'cic_eth.eth.bancor.convert_with_default_reserve',
# [
# from_address,
# target_return,
# minimum_return,
# from_address,
# self.chain_spec.asdict(),
# ],
# queue=self.queue,
# )
# s_nonce.link(s_tokens)
# s_check.link(s_nonce)
# if self.callback_param != None:
# s_convert.link(self.callback_success)
# s_tokens.link(s_convert).on_error(self.callback_error)
# else:
# s_tokens.link(s_convert)
#
# t = s_check.apply_async(queue=self.queue)
# return t
def transfer_from(self, from_address, to_address, value, token_symbol, spender_address):
"""Executes a chain of celery tasks that performs a transfer of ERC20 tokens to a third party, carried out by one address on behalf of another.
def convert_transfer(self, from_address, to_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
"""Executes a chain of celery tasks that performs conversion between two ERC20 tokens, and transfers to a specified recipient after convert has completed.
:param from_address: Ethereum address of sender
:type from_address: str, 0x-hex
:param to_address: Ethereum address of recipient
:param to_address: Ethereum address of receipient
:type to_address: str, 0x-hex
:param value: Amount of token, in 'wei'
:type value: int
:param token_symbol: ERC20 token symbol of token to send
:type token_symbol: str
:param spender_address: Ethereum address of account spending on behalf of holder
:type spender_address: str, 0x-hex
:param target_return: Estimated return from conversion
:type target_return: int
:param minimum_return: The least value of destination token return to allow
:type minimum_return: int
:param from_token_symbol: ERC20 token symbol of token being converted
:type from_token_symbol: str
:param to_token_symbol: ERC20 token symbol of token to receive
:type to_token_symbol: str
:returns: uuid of root task
:rtype: celery.Task
"""
raise NotImplementedError('out of service until new DEX migration is done')
s_check = celery.signature(
'cic_eth.admin.ctrl.check_lock',
[
[token_symbol],
[from_token_symbol, to_token_symbol],
self.chain_spec.asdict(),
LockEnum.QUEUE,
from_address,
@@ -234,7 +107,70 @@ class Api:
'cic_eth.eth.nonce.reserve_nonce',
[
self.chain_spec.asdict(),
],
queue=self.queue,
)
s_tokens = celery.signature(
'cic_eth.eth.erc20.resolve_tokens_by_symbol',
[
self.chain_str,
],
queue=self.queue,
)
s_convert = celery.signature(
'cic_eth.eth.bancor.convert_with_default_reserve',
[
from_address,
target_return,
minimum_return,
to_address,
self.chain_spec.asdict(),
],
queue=self.queue,
)
s_nonce.link(s_tokens)
s_check.link(s_nonce)
if self.callback_param != None:
s_convert.link(self.callback_success)
s_tokens.link(s_convert).on_error(self.callback_error)
else:
s_tokens.link(s_convert)
t = s_check.apply_async(queue=self.queue)
return t
def convert(self, from_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
"""Executes a chain of celery tasks that performs conversion between two ERC20 tokens.
:param from_address: Ethereum address of sender
:type from_address: str, 0x-hex
:param target_return: Estimated return from conversion
:type target_return: int
:param minimum_return: The least value of destination token return to allow
:type minimum_return: int
:param from_token_symbol: ERC20 token symbol of token being converted
:type from_token_symbol: str
:param to_token_symbol: ERC20 token symbol of token to receive
:type to_token_symbol: str
:returns: uuid of root task
:rtype: celery.Task
"""
raise NotImplementedError('out of service until new DEX migration is done')
s_check = celery.signature(
'cic_eth.admin.ctrl.check_lock',
[
[from_token_symbol, to_token_symbol],
self.chain_spec.asdict(),
LockEnum.QUEUE,
from_address,
],
queue=self.queue,
)
s_nonce = celery.signature(
'cic_eth.eth.nonce.reserve_nonce',
[
self.chain_spec.asdict(),
],
queue=self.queue,
)
@@ -245,41 +181,29 @@ class Api:
],
queue=self.queue,
)
s_allow = celery.signature(
'cic_eth.eth.erc20.check_allowance',
s_convert = celery.signature(
'cic_eth.eth.bancor.convert_with_default_reserve',
[
from_address,
value,
target_return,
minimum_return,
from_address,
self.chain_spec.asdict(),
spender_address,
],
queue=self.queue,
)
s_transfer = celery.signature(
'cic_eth.eth.erc20.transfer_from',
[
from_address,
to_address,
value,
self.chain_spec.asdict(),
spender_address,
],
queue=self.queue,
)
s_tokens.link(s_allow)
s_nonce.link(s_tokens)
s_check.link(s_nonce)
if self.callback_param != None:
s_transfer.link(self.callback_success)
s_allow.link(s_transfer).on_error(self.callback_error)
s_convert.link(self.callback_success)
s_tokens.link(s_convert).on_error(self.callback_error)
else:
s_allow.link(s_transfer)
s_tokens.link(s_convert)
t = s_check.apply_async(queue=self.queue)
return t
def transfer(self, from_address, to_address, value, token_symbol):
"""Executes a chain of celery tasks that performs a transfer of ERC20 tokens from one address to another.

View File

@@ -0,0 +1,8 @@
import math
def num_serialize(n):
if n == 0:
return b'\x00'
binlog = math.log2(n)
bytelength = int(binlog / 8 + 1)
return n.to_bytes(bytelength, 'big')
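A few worked values for num_serialize as defined above; the byte width is floor(log2(n) / 8) + 1, so it grows by one at each power-of-256 boundary:

    num_serialize(0)       # b'\x00' (special-cased)
    num_serialize(255)     # log2(255) ~ 7.99 -> 1 byte  -> b'\xff'
    num_serialize(256)     # log2(256) = 8.00 -> 2 bytes -> b'\x01\x00'
    num_serialize(65535)   # log2 ~ 16.00     -> 2 bytes -> b'\xff\xff'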

View File

@@ -80,8 +80,3 @@ class SignerError(SeppukuError):
class RoleAgencyError(SeppukuError):
"""Exception raised when a role cannot perform its function. This is a critical exception
"""
class YouAreBrokeError(Exception):
"""Exception raised when a value transfer is attempted without access to sufficient funds
"""

View File

@@ -3,7 +3,7 @@ import logging
# external imports
import celery
from erc20_faucet import Faucet
from erc20_single_shot_faucet import SingleShotFaucet as Faucet
from hexathon import (
strip_0x,
)
@@ -20,9 +20,8 @@ from chainlib.eth.tx import (
)
from chainlib.chain import ChainSpec
from chainlib.error import JSONRPCException
from eth_accounts_index.registry import AccountRegistry
from eth_accounts_index import AccountsIndex
from sarafu_faucet import MinterFaucet
from eth_accounts_index import AccountRegistry
from sarafu_faucet import MinterFaucet as Faucet
from chainqueue.db.models.tx import TxCache
# local import
@@ -134,7 +133,7 @@ def register(self, account_address, chain_spec_dict, writer_address=None):
rpc_signer = RPCConnection.connect(chain_spec, 'signer')
nonce_oracle = CustodialTaskNonceOracle(writer_address, self.request.root_id, session=session) #, default_nonce)
gas_oracle = self.create_gas_oracle(rpc, AccountRegistry.gas)
account_registry = AccountsIndex(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
account_registry = AccountRegistry(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = account_registry.add(account_registry_address, writer_address, account_address, tx_format=TxFormat.RLP_SIGNED)
rpc_signer.disconnect()
@@ -186,7 +185,7 @@ def gift(self, account_address, chain_spec_dict):
# Generate and sign transaction
rpc_signer = RPCConnection.connect(chain_spec, 'signer')
nonce_oracle = CustodialTaskNonceOracle(account_address, self.request.root_id, session=session) #, default_nonce)
gas_oracle = self.create_gas_oracle(rpc, MinterFaucet.gas)
gas_oracle = self.create_gas_oracle(rpc, Faucet.gas)
faucet = Faucet(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = faucet.give_to(faucet_address, account_address, account_address, tx_format=TxFormat.RLP_SIGNED)
rpc_signer.disconnect()
@@ -339,7 +338,7 @@ def cache_account_data(
chain_spec = ChainSpec.from_dict(chain_spec_dict)
tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
tx = unpack(tx_signed_raw_bytes, chain_spec)
tx_data = AccountsIndex.parse_add_request(tx['data'])
tx_data = AccountRegistry.parse_add_request(tx['data'])
session = SessionBase.create_session()
tx_cache = TxCache(

View File

@@ -6,6 +6,7 @@ import celery
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.chain import ChainSpec
from chainlib.connection import RPCConnection
from chainlib.eth.erc20 import ERC20
from chainlib.eth.tx import (
TxFormat,
unpack,
@@ -15,7 +16,6 @@ from cic_eth_registry.erc20 import ERC20Token
from hexathon import strip_0x
from chainqueue.db.models.tx import TxCache
from chainqueue.error import NotLocalTxError
from eth_erc20 import ERC20
# local imports
from cic_eth.db.models.base import SessionBase
@@ -24,7 +24,6 @@ from cic_eth.error import (
TokenCountError,
PermanentTxError,
OutOfGasError,
YouAreBrokeError,
)
from cic_eth.queue.tx import register_tx
from cic_eth.eth.gas import (
@@ -72,117 +71,6 @@ def balance(tokens, holder_address, chain_spec_dict):
return tokens
@celery_app.task(bind=True)
def check_allowance(self, tokens, holder_address, value, chain_spec_dict, spender_address):
"""Best-effort verification that the allowance granted to the spender is sufficient for the transfer.
:raises YouAreBrokeError: If allowance is insufficient
:param tokens: Token addresses
:type tokens: list of str, 0x-hex
:param holder_address: Token holder address
:type holder_address: str, 0x-hex
:param value: Amount of token, in 'wei'
:type value: int
:param chain_spec_dict: Chain spec dict representation
:type chain_spec_dict: dict
:param spender_address: Address of account spending on behalf of holder
:type spender_address: str, 0x-hex
:return: Token list as passed to task
:rtype: dict
"""
logg.debug('tokens {}'.format(tokens))
if len(tokens) != 1:
raise TokenCountError
t = tokens[0]
chain_spec = ChainSpec.from_dict(chain_spec_dict)
rpc = RPCConnection.connect(chain_spec, 'default')
caller_address = ERC20Token.caller_address
c = ERC20(chain_spec)
o = c.allowance(t['address'], holder_address, spender_address, sender_address=caller_address)
r = rpc.do(o)
allowance = c.parse_allowance(r)
if allowance < value:
errstr = 'allowance {} insufficient to transfer {} {} by {} on behalf of {}'.format(allowance, value, t['symbol'], spender_address, holder_address)
logg.error(errstr)
raise YouAreBrokeError(errstr)
return tokens
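A sketch of invoking the task removed in this hunk, with placeholder addresses; the token list shape matches what resolve_tokens_by_symbol produces, and the task raises YouAreBrokeError when the on-chain allowance is below the requested value:

    tokens = [{'address': '0x...token', 'symbol': 'GFT'}]    # placeholder values
    t = check_allowance.apply_async(
        (tokens, '0x...holder', 1000, chain_spec.asdict(), '0x...spender'),
        queue='cic-eth',                                     # assumed queue name
    )
    tokens = t.get()    # passes the token list through on success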
@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
def transfer_from(self, tokens, holder_address, receiver_address, value, chain_spec_dict, spender_address):
"""Transfer ERC20 tokens between addresses
The first argument is a list of tokens, so that the task can be chained to the symbol-to-token-address resolver. However, it accepts only a single token.
:param tokens: Token addresses
:type tokens: list of str, 0x-hex
:param holder_address: Token holder address
:type holder_address: str, 0x-hex
:param receiver_address: Token receiver address
:type receiver_address: str, 0x-hex
:param value: Amount of token, in 'wei'
:type value: int
:param chain_spec_dict: Chain spec dict representation
:type chain_spec_dict: dict
:param spender_address: Address of account spending on behalf of holder
:type spender_address: str, 0x-hex
:raises TokenCountError: Either none or more than one token has been passed as the tokens argument
:return: Transaction hash for transfer operation
:rtype: str, 0x-hex
"""
# we only allow one token, one transfer
logg.debug('tokens {}'.format(tokens))
if len(tokens) != 1:
raise TokenCountError
t = tokens[0]
chain_spec = ChainSpec.from_dict(chain_spec_dict)
queue = self.request.delivery_info.get('routing_key')
rpc = RPCConnection.connect(chain_spec, 'default')
rpc_signer = RPCConnection.connect(chain_spec, 'signer')
session = self.create_session()
nonce_oracle = CustodialTaskNonceOracle(holder_address, self.request.root_id, session=session)
gas_oracle = self.create_gas_oracle(rpc, MaxGasOracle.gas)
c = ERC20(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle)
try:
(tx_hash_hex, tx_signed_raw_hex) = c.transfer_from(t['address'], spender_address, holder_address, receiver_address, value, tx_format=TxFormat.RLP_SIGNED)
except FileNotFoundError as e:
raise SignerError(e)
except ConnectionError as e:
raise SignerError(e)
rpc_signer.disconnect()
rpc.disconnect()
cache_task = 'cic_eth.eth.erc20.cache_transfer_from_data'
register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=cache_task, session=session)
session.commit()
session.close()
gas_pair = gas_oracle.get_gas(tx_signed_raw_hex)
gas_budget = gas_pair[0] * gas_pair[1]
logg.debug('transfer tx {} {} {}'.format(tx_hash_hex, queue, gas_budget))
s = create_check_gas_task(
[tx_signed_raw_hex],
chain_spec,
holder_address,
gas_budget,
[tx_hash_hex],
queue,
)
s.apply_async()
return tx_hash_hex
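The Api.transfer_from method earlier in this diff chains the lock check, nonce reservation, token resolution, allowance check and this task together; a condensed sketch of that wiring, with holder, recipient, spender, value and the queue name as placeholders and LockEnum as imported in the Api module:

    import celery

    q = 'cic-eth'    # assumed queue name
    s_check = celery.signature('cic_eth.admin.ctrl.check_lock',
            [[token_symbol], chain_spec.asdict(), LockEnum.QUEUE, holder], queue=q)
    s_nonce = celery.signature('cic_eth.eth.nonce.reserve_nonce',
            [chain_spec.asdict()], queue=q)
    s_tokens = celery.signature('cic_eth.eth.erc20.resolve_tokens_by_symbol',
            [chain_spec.asdict()], queue=q)
    s_allow = celery.signature('cic_eth.eth.erc20.check_allowance',
            [holder, value, chain_spec.asdict(), spender], queue=q)
    s_transfer = celery.signature('cic_eth.eth.erc20.transfer_from',
            [holder, recipient, value, chain_spec.asdict(), spender], queue=q)
    s_allow.link(s_transfer)    # the allowance gate runs before the transfer itself
    s_tokens.link(s_allow)
    s_nonce.link(s_tokens)
    s_check.link(s_nonce)
    t = s_check.apply_async(queue=q)    # handle of the root task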
@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
def transfer(self, tokens, holder_address, receiver_address, value, chain_spec_dict):
"""Transfer ERC20 tokens between addresses
@@ -344,7 +232,6 @@ def resolve_tokens_by_symbol(self, token_symbols, chain_spec_dict):
logg.debug('token {}'.format(token_address))
tokens.append({
'address': token_address,
'symbol': token_symbol,
'converters': [],
})
rpc.disconnect()
@@ -392,48 +279,6 @@ def cache_transfer_data(
return (tx_hash_hex, cache_id)
@celery_app.task(base=CriticalSQLAlchemyTask)
def cache_transfer_from_data(
tx_hash_hex,
tx_signed_raw_hex,
chain_spec_dict,
):
"""Helper function for otx_cache_transfer_from
:param tx_hash_hex: Transaction hash
:type tx_hash_hex: str, 0x-hex
:param tx_signed_raw_hex: Signed raw transaction
:type tx_signed_raw_hex: str, 0x-hex
:returns: Transaction hash and id of cache element in storage backend, respectively
:rtype: tuple
"""
chain_spec = ChainSpec.from_dict(chain_spec_dict)
tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
tx = unpack(tx_signed_raw_bytes, chain_spec)
tx_data = ERC20.parse_transfer_from_request(tx['data'])
spender_address = tx_data[0]
recipient_address = tx_data[1]
token_value = tx_data[2]
session = SessionBase.create_session()
tx_cache = TxCache(
tx_hash_hex,
tx['from'],
recipient_address,
tx['to'],
tx['to'],
token_value,
token_value,
session=session,
)
session.add(tx_cache)
session.commit()
cache_id = tx_cache.id
session.close()
return (tx_hash_hex, cache_id)
@celery_app.task(base=CriticalSQLAlchemyTask)
def cache_approve_data(
tx_hash_hex,

View File

@@ -57,12 +57,10 @@ celery_app = celery.current_app
logg = logging.getLogger()
MAXIMUM_FEE_UNITS = 8000000
class MaxGasOracle:
def gas(code=None):
return MAXIMUM_FEE_UNITS
return 8000000
def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None):
@@ -152,7 +150,7 @@ def cache_gas_data(
@celery_app.task(bind=True, throws=(OutOfGasError), base=CriticalSQLAlchemyAndWeb3Task)
def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_required=MAXIMUM_FEE_UNITS):
def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_required=None):
"""Check the gas level of the sender address of a transaction.
If the account balance is not sufficient for the required gas, a gas refill is requested and OutOfGasError is raised.
@@ -172,30 +170,24 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
:return: Signed raw transaction data list
:rtype: param txs, unchanged
"""
chain_spec = ChainSpec.from_dict(chain_spec_dict)
logg.debug('txs {} tx_hashes {}'.format(txs, tx_hashes))
addresspass = None
if len(txs) == 0:
addresspass = []
for i in range(len(tx_hashes)):
o = get_tx(chain_spec_dict, tx_hashes[i])
o = get_tx(tx_hashes[i])
txs.append(o['signed_tx'])
logg.debug('sender {}'.format(o))
tx = unpack(bytes.fromhex(strip_0x(o['signed_tx'])), chain_spec)
if address == None:
address = tx['from']
elif address != tx['from']:
raise ValueError('txs passed to check gas must all have same sender; had {} got {}'.format(address, tx['from']))
addresspass.append(address)
address = o['address']
#if not web3.Web3.isChecksumAddress(address):
if not is_checksum_address(address):
raise ValueError('invalid address {}'.format(address))
chain_spec = ChainSpec.from_dict(chain_spec_dict)
queue = self.request.delivery_info.get('routing_key')
conn = RPCConnection.connect(chain_spec)
# TODO: it should not be necessary to pass the address explicitly; if not passed, it should be derived from the tx
gas_balance = 0
try:
o = balance(address)
@@ -206,9 +198,6 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
conn.disconnect()
raise EthError('gas_balance call for {}: {}'.format(address, e))
if gas_required == None:
gas_required = MAXIMUM_FEE_UNITS
logg.debug('address {} has gas {} needs {}'.format(address, gas_balance, gas_required))
session = SessionBase.create_session()
gas_provider = AccountRole.get_address('GAS_GIFTER', session=session)
@@ -279,8 +268,7 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
queue=queue,
)
ready_tasks.append(s)
t = celery.group(ready_tasks)()
logg.debug('group {}'.format(t))
celery.group(ready_tasks)()
return txs
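The budget that check_gas compares against the sender's balance is simply limit times price, as the gas_pair product in transfer_from above shows; a minimal sketch of the comparison, assuming the oracle returns a (price, limit) pair and that the balance query result is a hex quantity:

    from chainlib.eth.gas import balance

    def has_enough_gas(conn, gas_oracle, address, tx_signed_raw_hex):
        gas_pair = gas_oracle.get_gas(tx_signed_raw_hex)    # pair order assumed
        gas_required = gas_pair[0] * gas_pair[1]
        o = balance(address)
        gas_balance = int(conn.do(o), 16)                   # hex quantity -> int (assumption)
        return gas_balance >= gas_required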

View File

@@ -21,7 +21,6 @@ from chainqueue.db.models.tx import Otx
from chainqueue.db.models.tx import TxCache
from chainqueue.db.enum import StatusBits
from chainqueue.error import NotLocalTxError
from potaahto.symbols import snake_and_camel
# local imports
from cic_eth.db import SessionBase
@@ -59,9 +58,6 @@ def hashes_to_txs(self, tx_hashes):
if len(tx_hashes) == 0:
raise ValueError('no transaction to send')
for i in range(len(tx_hashes)):
tx_hashes[i] = strip_0x(tx_hashes[i])
queue = self.request.delivery_info['routing_key']
session = SessionBase.create_session()
@@ -152,7 +148,7 @@ def send(self, txs, chain_spec_dict):
@celery_app.task(bind=True, throws=(NotFoundEthException,), base=CriticalWeb3Task)
def sync_tx(self, tx_hash_hex, chain_spec_dict):
"""Force update of network status of a single transaction
"""Force update of network status of a simgle transaction
:param tx_hash_hex: Transaction hash
:type tx_hash_hex: str, 0x-hex
@@ -177,14 +173,12 @@ def sync_tx(self, tx_hash_hex, chain_spec_dict):
# TODO: apply receipt in tx object to validate and normalize input
if rcpt != None:
rcpt = snake_and_camel(rcpt)
success = rcpt['status'] == 1
logg.debug('sync tx {} mined block {} tx index {} success {}'.format(tx_hash_hex, rcpt['blockNumber'], rcpt['transactionIndex'], success))
logg.debug('sync tx {} mined block {} success {}'.format(tx_hash_hex, rcpt['blockNumber'], success))
s = celery.signature(
'cic_eth.queue.state.set_final',
[
chain_spec_dict,
tx_hash_hex,
rcpt['blockNumber'],
rcpt['transactionIndex'],
@@ -192,14 +186,12 @@ def sync_tx(self, tx_hash_hex, chain_spec_dict):
],
queue=queue,
)
# TODO: it's not entirely clear how we can reliably determine that it is in the mempool without explicitly checking
else:
logg.debug('sync tx {} mempool'.format(tx_hash_hex))
s = celery.signature(
'cic_eth.queue.state.set_sent',
[
chain_spec_dict,
tx_hash_hex,
],
queue=queue,

View File

@@ -7,7 +7,7 @@ from chainlib.chain import ChainSpec
from chainlib.connection import RPCConnection
from chainlib.eth.constant import ZERO_ADDRESS
from cic_eth_registry import CICRegistry
from eth_address_declarator import Declarator
from eth_address_declarator import AddressDeclarator
# local imports
from cic_eth.task import BaseTask
@@ -23,12 +23,12 @@ def translate_address(address, trusted_addresses, chain_spec, sender_address=ZER
registry = CICRegistry(chain_spec, rpc)
declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address)
c = Declarator(chain_spec)
c = AddressDeclarator(chain_spec)
for trusted_address in trusted_addresses:
o = c.declaration(declarator_address, trusted_address, address, sender_address=sender_address)
r = rpc.do(o)
declaration_hex = Declarator.parse_declaration(r)
declaration_hex = AddressDeclarator.parse_declaration(r)
declaration_hex = declaration_hex[0].rstrip('0')
declaration_bytes = bytes.fromhex(declaration_hex)
declaration = None
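The rstrip/fromhex step above trims the zero padding from each 32-byte declaration record before decoding it as text; a worked example (note that fromhex needs the trimmed string to keep an even length, which holds as long as the content's last byte does not end in a zero nibble):

    declaration_hex = '4869207468657265' + '0' * 48    # 'Hi there', zero-padded to 32 bytes
    trimmed = declaration_hex.rstrip('0')              # -> '4869207468657265'
    print(bytes.fromhex(trimmed).decode('utf-8'))      # -> 'Hi there'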

View File

@@ -14,13 +14,13 @@ from chainlib.eth.tx import (
)
from chainlib.eth.block import block_by_number
from chainlib.eth.contract import abi_decode_single
from chainlib.eth.erc20 import ERC20
from hexathon import strip_0x
from cic_eth_registry import CICRegistry
from cic_eth_registry.erc20 import ERC20Token
from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusEnum
from chainqueue.sql.query import get_tx_cache
from eth_erc20 import ERC20
from chainqueue.query import get_tx_cache
# local imports
from cic_eth.queue.time import tx_times
@@ -114,7 +114,7 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
# TODO: pass through registry to validate declarator entry of token
#token = registry.by_address(tx['to'], sender_address=self.call_address)
token = ERC20Token(chain_spec, rpc, tx['to'])
token = ERC20Token(rpc, tx['to'])
token_symbol = token.symbol
token_decimals = token.decimals
times = tx_times(tx['hash'], chain_spec)

View File

@@ -1,77 +0,0 @@
# standard imports
import os
# external imports
import pytest
from chainlib.eth.contract import (
ABIContractEncoder,
ABIContractType,
)
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.block import (
block_latest,
block_by_number,
Block,
)
from chainlib.eth.tx import (
receipt,
TxFactory,
TxFormat,
unpack,
Tx,
)
from hexathon import strip_0x
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
@pytest.fixture(scope='function')
def bogus_tx_block(
default_chain_spec,
eth_rpc,
eth_signer,
contract_roles,
):
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
gas_oracle = OverrideGasOracle(limit=2000000, conn=eth_rpc)
f = open(os.path.join(script_dir, 'testdata', 'Bogus.bin'), 'r')
bytecode = f.read()
f.close()
c = TxFactory(default_chain_spec, signer=eth_signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle)
tx = c.template(contract_roles['CONTRACT_DEPLOYER'], None, use_nonce=True)
tx = c.set_code(tx, bytecode)
(tx_hash_hex, o) = c.build(tx)
r = eth_rpc.do(o)
o = receipt(tx_hash_hex)
r = eth_rpc.do(o)
contract_address = r['contract_address']
enc = ABIContractEncoder()
enc.method('poke')
data = enc.get()
tx = c.template(contract_roles['CONTRACT_DEPLOYER'], contract_address, use_nonce=True)
tx = c.set_code(tx, data)
(tx_hash_hex, o) = c.finalize(tx, TxFormat.JSONRPC)
r = eth_rpc.do(o)
tx_signed_raw_hex = strip_0x(o['params'][0])
o = block_latest()
r = eth_rpc.do(o)
o = block_by_number(r, include_tx=False)
r = eth_rpc.do(o)
block = Block(r)
block.txs = [tx_hash_hex]
tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
tx_src = unpack(tx_signed_raw_bytes, default_chain_spec)
tx = Tx(tx_src, block=block)
return (block, tx)

View File

@@ -1 +0,0 @@
60806040526000805534801561001457600080fd5b50610181806100246000396000f3fe608060405234801561001057600080fd5b5060043610610053576000357c0100000000000000000000000000000000000000000000000000000000900480630dbe671f146100585780631817835814610076575b600080fd5b610060610080565b60405161006d91906100ae565b60405180910390f35b61007e610086565b005b60005481565b600080815480929190610098906100d3565b9190505550565b6100a8816100c9565b82525050565b60006020820190506100c3600083018461009f565b92915050565b6000819050919050565b60006100de826100c9565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8214156101115761011061011c565b5b600182019050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fdfea264697066735822122034ad8e91e864f030d47f5b93e281869206c1b203c36dc79a209ac9c9c16e577564736f6c63430008040033

View File

@@ -1,10 +0,0 @@
pragma solidity ^0.8.0;
contract Bogus {
uint256 public a = 0;
function poke() public {
a++;
}
}
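The bogus_tx_block fixture above encodes its poke() call with ABIContractEncoder; the calldata is just the 4-byte selector, which can be spotted in the dispatch table of the Bogus.bin hex in the previous file (0x18178358, next to 0x0dbe671f for the a() getter). A sketch, assuming get() returns the plain hex calldata as the fixture passes to set_code:

    from chainlib.eth.contract import ABIContractEncoder

    enc = ABIContractEncoder()
    enc.method('poke')    # selector = first 4 bytes of keccak256('poke()')
    data = enc.get()      # -> '18178358'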

View File

@@ -5,7 +5,7 @@ import datetime
import celery
from chainlib.chain import ChainSpec
from chainlib.eth.tx import unpack
import chainqueue.sql.query
import chainqueue.query
from chainqueue.db.enum import (
StatusEnum,
is_alive,
@@ -28,7 +28,7 @@ celery_app = celery.current_app
def get_tx_cache(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.query.get_tx_cache(chain_spec, tx_hash, session=session)
r = chainqueue.query.get_tx_cache(chain_spec, tx_hash, session=session)
session.close()
return r
@@ -37,7 +37,7 @@ def get_tx_cache(chain_spec_dict, tx_hash):
def get_tx(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.query.get_tx(chain_spec, tx_hash, session=session)
r = chainqueue.query.get_tx(chain_spec, tx_hash)
session.close()
return r
@@ -46,7 +46,7 @@ def get_tx(chain_spec_dict, tx_hash):
def get_account_tx(chain_spec_dict, address, as_sender=True, as_recipient=True, counterpart=None):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.query.get_account_tx(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=session)
r = chainqueue.query.get_account_tx(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=session)
session.close()
return r
@@ -55,17 +55,17 @@ def get_account_tx(chain_spec_dict, address, as_sender=True, as_recipient=True,
def get_upcoming_tx_nolock(chain_spec_dict, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack)
r = chainqueue.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack)
session.close()
return r
def get_status_tx(chain_spec, status, not_status=None, before=None, exact=False, limit=0, session=None):
return chainqueue.sql.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack)
return chainqueue.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack)
def get_paused_tx(chain_spec, status=None, sender=None, session=None, decoder=None):
return chainqueue.sql.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack)
return chainqueue.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack)
def get_nonce_tx(chain_spec, nonce, sender):

View File

@@ -1,6 +1,6 @@
# external imports
from chainlib.chain import ChainSpec
import chainqueue.sql.state
import chainqueue.state
# local imports
import celery
@@ -14,7 +14,7 @@ celery_app = celery.current_app
def set_sent(chain_spec_dict, tx_hash, fail=False):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.set_sent(chain_spec, tx_hash, fail, session=session)
r = chainqueue.state.set_sent(chain_spec, tx_hash, fail, session=session)
session.close()
return r
@@ -23,7 +23,7 @@ def set_sent(chain_spec_dict, tx_hash, fail=False):
def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.set_final(chain_spec, tx_hash, block=block, tx_index=tx_index, fail=fail, session=session)
r = chainqueue.state.set_final(chain_spec, tx_hash, block=block, tx_index=tx_index, fail=fail, session=session)
session.close()
return r
@@ -32,7 +32,7 @@ def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False):
def set_cancel(chain_spec_dict, tx_hash, manual=False):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.set_cancel(chain_spec, tx_hash, manual, session=session)
r = chainqueue.state.set_cancel(chain_spec, tx_hash, manual, session=session)
session.close()
return r
@@ -41,7 +41,7 @@ def set_cancel(chain_spec_dict, tx_hash, manual=False):
def set_rejected(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.set_rejected(chain_spec, tx_hash, session=session)
r = chainqueue.state.set_rejected(chain_spec, tx_hash, session=session)
session.close()
return r
@@ -50,7 +50,7 @@ def set_rejected(chain_spec_dict, tx_hash):
def set_fubar(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.set_fubar(chain_spec, tx_hash, session=session)
r = chainqueue.state.set_fubar(chain_spec, tx_hash, session=session)
session.close()
return r
@@ -59,7 +59,7 @@ def set_fubar(chain_spec_dict, tx_hash):
def set_manual(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.set_manual(chain_spec, tx_hash, session=session)
r = chainqueue.state.set_manual(chain_spec, tx_hash, session=session)
session.close()
return r
@@ -68,7 +68,7 @@ def set_manual(chain_spec_dict, tx_hash):
def set_ready(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.set_ready(chain_spec, tx_hash, session=session)
r = chainqueue.state.set_ready(chain_spec, tx_hash, session=session)
session.close()
return r
@@ -77,7 +77,7 @@ def set_ready(chain_spec_dict, tx_hash):
def set_reserved(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.set_reserved(chain_spec, tx_hash, session=session)
r = chainqueue.state.set_reserved(chain_spec, tx_hash, session=session)
session.close()
return r
@@ -86,7 +86,7 @@ def set_reserved(chain_spec_dict, tx_hash):
def set_waitforgas(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.set_waitforgas(chain_spec, tx_hash, session=session)
r = chainqueue.state.set_waitforgas(chain_spec, tx_hash, session=session)
session.close()
return r
@@ -95,7 +95,7 @@ def set_waitforgas(chain_spec_dict, tx_hash):
def get_state_log(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.get_state_log(chain_spec, tx_hash, session=session)
r = chainqueue.state.get_state_log(chain_spec, tx_hash, session=session)
session.close()
return r
@@ -104,6 +104,6 @@ def get_state_log(chain_spec_dict, tx_hash):
def obsolete(chain_spec_dict, tx_hash, final):
chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session()
r = chainqueue.sql.state.obsolete_by_cache(chain_spec, tx_hash, final, session=session)
r = chainqueue.state.obsolete_by_cache(chain_spec, tx_hash, final, session=session)
session.close()
return r
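Every wrapper in this module follows the same shape: rebuild the ChainSpec from its dict form, open a session, delegate to chainqueue, close. Calling any of them through celery therefore looks the same; a sketch with an assumed queue name:

    import celery

    s = celery.signature(
        'cic_eth.queue.state.set_ready',
        [
            chain_spec.asdict(),    # serialized spec, rebuilt inside the task
            tx_hash,
        ],
        queue='cic-eth',            # assumed queue name
    )
    t = s.apply_async()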

View File

@@ -12,7 +12,6 @@ from chainqueue.error import NotLocalTxError
# local imports
from cic_eth.task import CriticalSQLAlchemyAndWeb3Task
from cic_eth.db.models.base import SessionBase
celery_app = celery.current_app

View File

@@ -15,14 +15,14 @@ from sqlalchemy import tuple_
from sqlalchemy import func
from chainlib.chain import ChainSpec
from chainlib.eth.tx import unpack
import chainqueue.sql.state
import chainqueue.state
from chainqueue.db.enum import (
StatusEnum,
StatusBits,
is_alive,
dead,
)
from chainqueue.sql.tx import create
from chainqueue.tx import create
from chainqueue.error import NotLocalTxError
from chainqueue.db.enum import status_str

View File

@@ -5,30 +5,29 @@ import logging
from cic_eth_registry import CICRegistry
from cic_eth_registry.lookup.declarator import AddressDeclaratorLookup
from cic_eth_registry.lookup.tokenindex import TokenIndexLookup
from chainlib.eth.constant import ZERO_ADDRESS
logg = logging.getLogger()
def connect_token_registry(rpc, chain_spec, sender_address=ZERO_ADDRESS):
def connect_token_registry(rpc, chain_spec):
registry = CICRegistry(chain_spec, rpc)
token_registry_address = registry.by_name('TokenRegistry', sender_address=sender_address)
token_registry_address = registry.by_name('TokenRegistry')
logg.debug('using token registry address {}'.format(token_registry_address))
lookup = TokenIndexLookup(chain_spec, token_registry_address)
CICRegistry.add_lookup(lookup)
def connect_declarator(rpc, chain_spec, trusted_addresses, sender_address=ZERO_ADDRESS):
def connect_declarator(rpc, chain_spec, trusted_addresses):
registry = CICRegistry(chain_spec, rpc)
declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address)
declarator_address = registry.by_name('AddressDeclarator')
logg.debug('using declarator address {}'.format(declarator_address))
lookup = AddressDeclaratorLookup(chain_spec, declarator_address, trusted_addresses)
CICRegistry.add_lookup(lookup)
def connect(rpc, chain_spec, registry_address, sender_address=ZERO_ADDRESS):
def connect(rpc, chain_spec, registry_address):
CICRegistry.address = registry_address
registry = CICRegistry(chain_spec, rpc)
registry_address = registry.by_name('ContractRegistry', sender_address=sender_address)
registry_address = registry.by_name('ContractRegistry')
return registry
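Service entry points elsewhere in this diff call these helpers in sequence at startup; a condensed sketch of that wiring, with the registry address taken from configuration:

    registry = connect(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
    connect_declarator(rpc, chain_spec, trusted_addresses)    # adds the AddressDeclarator lookup
    connect_token_registry(rpc, chain_spec)                   # adds the TokenRegistry lookup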

View File

@@ -21,7 +21,7 @@ from chainqueue.db.enum import (
StatusBits,
)
from chainqueue.error import NotLocalTxError
from chainqueue.sql.state import set_reserved
from chainqueue.state import set_reserved
# local imports
import cic_eth

View File

@@ -3,20 +3,19 @@ import logging
# external imports
import celery
from cic_eth_registry.error import (
UnknownContractError,
NotAContractError,
)
from cic_eth_registry.error import UnknownContractError
from chainlib.status import Status as TxStatus
from chainlib.eth.address import to_checksum_address
from chainlib.eth.error import RequestMismatchException
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.eth.erc20 import ERC20
from hexathon import (
strip_0x,
add_0x,
)
from eth_erc20 import ERC20
from erc20_faucet import Faucet
# TODO: use sarafu_Faucet for both when inheritance has been implemented
from erc20_single_shot_faucet import SingleShotFaucet
from sarafu_faucet import MinterFaucet as Faucet
# local imports
from .base import SyncFilter
@@ -72,15 +71,14 @@ class CallbackFilter(SyncFilter):
#transfer_data['token_address'] = tx.inputs[0]
faucet_contract = tx.inputs[0]
c = Faucet(self.chain_spec)
c = SingleShotFaucet(self.chain_spec)
o = c.token(faucet_contract, sender_address=self.caller_address)
r = conn.do(o)
transfer_data['token_address'] = add_0x(c.parse_token(r))
o = c.token_amount(faucet_contract, sender_address=self.caller_address)
o = c.amount(faucet_contract, sender_address=self.caller_address)
r = conn.do(o)
transfer_data['value'] = c.parse_token_amount(r)
transfer_data['value'] = c.parse_amount(r)
return ('tokengift', transfer_data)
@@ -129,7 +127,8 @@ class CallbackFilter(SyncFilter):
(transfer_type, transfer_data) = parser(tx, conn)
if transfer_type == None:
continue
break
else:
pass
except RequestMismatchException:
continue
@@ -172,9 +171,7 @@ class CallbackFilter(SyncFilter):
t = self.call_back(transfer_type, result)
logg.info('callback success task id {} tx {} queue {}'.format(t, tx.hash, t.queue))
except UnknownContractError:
logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(self.queue, self.method, transfer_data['to'], tx.hash))
except NotAContractError:
logg.debug('callback filter {}:{} skipping "transfer" on non-contract address {} tx {}'.format(self.queue, self.method, transfer_data['to'], tx.hash))
logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(tx.queue, tx.method, transfer_data['to'], tx.hash))
def __str__(self):

View File

@@ -10,15 +10,14 @@ from chainlib.eth.tx import unpack
from chainqueue.db.enum import StatusBits
from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx
from chainqueue.sql.query import get_paused_tx_cache as get_paused_tx
from chainqueue.query import get_paused_tx_cache as get_paused_tx
# local imports
from cic_eth.db.models.base import SessionBase
from cic_eth.eth.gas import create_check_gas_task
from .base import SyncFilter
#logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger()
logg = logging.getLogger().getChild(__name__)
class GasFilter(SyncFilter):
@@ -28,11 +27,11 @@ class GasFilter(SyncFilter):
self.chain_spec = chain_spec
def filter(self, conn, block, tx, db_session):
def filter(self, conn, block, tx, session):
if tx.value > 0:
tx_hash_hex = add_0x(tx.hash)
logg.debug('gas refill tx {}'.format(tx_hash_hex))
session = SessionBase.bind_session(db_session)
session = SessionBase.bind_session(session)
q = session.query(TxCache.recipient)
q = q.join(Otx)
q = q.filter(Otx.tx_hash==strip_0x(tx_hash_hex))
@@ -57,7 +56,7 @@ class GasFilter(SyncFilter):
tx_hashes_hex=list(txs.keys()),
queue=self.queue,
)
return s.apply_async()
s.apply_async()
def __str__(self):

View File

@@ -14,7 +14,7 @@ from .base import SyncFilter
logg = logging.getLogger().getChild(__name__)
account_registry_add_log_hash = '0x9cc987676e7d63379f176ea50df0ae8d2d9d1141d1231d4ce15b5965f73c9430'
account_registry_add_log_hash = '0x5ed3bdd47b9af629827a8d129aa39c870b10c03f0153fe9ddb8e84b665061acd'
class RegistrationFilter(SyncFilter):
@@ -50,8 +50,7 @@ class RegistrationFilter(SyncFilter):
queue=self.queue,
)
s_nonce.link(s_gift)
t = s_nonce.apply_async()
return t
s_nonce.apply_async()
def __str__(self):

View File

@@ -3,7 +3,7 @@ import logging
# external imports
import celery
from chainqueue.sql.state import obsolete_by_cache
from chainqueue.state import obsolete_by_cache
logg = logging.getLogger()

View File

@@ -32,7 +32,7 @@ class TransferAuthFilter(SyncFilter):
self.transfer_request_contract = registry.by_name('TransferAuthorization', sender_address=call_address)
def filter(self, conn, block, tx, db_session): #rcpt, chain_str, session=None):
def filter(self, conn, block, tx, session): #rcpt, chain_str, session=None):
if tx.payload == None:
logg.debug('no payload')
@@ -45,17 +45,16 @@ class TransferAuthFilter(SyncFilter):
return False
recipient = tx.inputs[0]
#if recipient != self.transfer_request_contract.address():
if recipient != self.transfer_request_contract:
if recipient != self.transfer_request_contract.address():
logg.debug('not our transfer auth contract address {}'.format(recipient))
return False
r = TransferAuthorization.parse_create_request_request(tx.payload)
sender = r[0]
recipient = r[1]
token = r[2]
value = r[3]
sender = abi_decode_single(ABIContractType.ADDRESS, r[0])
recipient = abi_decode_single(ABIContractType.ADDRESS, r[1])
token = abi_decode_single(ABIContractType.ADDRESS, r[2])
value = abi_decode_single(ABIContractType.UINT256, r[3])
token_data = {
'address': token,
@@ -65,7 +64,6 @@ class TransferAuthFilter(SyncFilter):
'cic_eth.eth.nonce.reserve_nonce',
[
[token_data],
self.chain_spec.asdict(),
sender,
],
queue=self.queue,
@@ -82,7 +80,7 @@ class TransferAuthFilter(SyncFilter):
)
s_nonce.link(s_approve)
t = s_nonce.apply_async()
return t
return True
def __str__(self):

View File

@@ -30,7 +30,7 @@ class TxFilter(SyncFilter):
if otx == None:
logg.debug('tx {} not found locally, skipping'.format(tx_hash_hex))
return None
logg.debug('otx filter match on {}'.format(otx.tx_hash))
logg.info('tx filter match on {}'.format(otx.tx_hash))
db_session.flush()
SessionBase.release_session(db_session)
s_final_state = celery.signature(

View File

@@ -0,0 +1,136 @@
# standard imports
import os
import re
import logging
import argparse
import json
# third-party imports
import web3
import confini
import celery
from json.decoder import JSONDecodeError
from cic_registry.chain import ChainSpec
# local imports
from cic_eth.db import dsn_from_config
from cic_eth.db.models.base import SessionBase
from cic_eth.eth.util import unpack_signed_raw_tx
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_eth', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')
config_dir = os.path.join('/usr/local/etc/cic-eth')
argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()
if args.vv:
logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
logging.getLogger().setLevel(logging.INFO)
config = confini.Config(args.c, args.env_prefix)
config.process()
args_override = {
'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))
dsn = dsn_from_config(config)
SessionBase.connect(dsn)
celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
queue = args.q
re_something = r'^/something/?'
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
def process_something(session, env):
r = re.match(re_something, env.get('PATH_INFO'))
if not r:
return None
#if env.get('CONTENT_TYPE') != 'application/json':
# raise AttributeError('content type')
#if env.get('REQUEST_METHOD') != 'POST':
# raise AttributeError('method')
#post_data = json.load(env.get('wsgi.input'))
#return ('text/plain', 'foo'.encode('utf-8'),)
# uwsgi application
def application(env, start_response):
for k in env.keys():
logg.debug('env {} {}'.format(k, env[k]))
headers = []
content = b''
err = None
session = SessionBase.create_session()
for handler in [
process_something,
]:
try:
r = handler(session, env)
except AttributeError as e:
logg.error('handler fail attribute {}'.format(e))
err = '400 Impertinent request'
break
except JSONDecodeError as e:
logg.error('handler fail json {}'.format(e))
err = '400 Invalid data format'
break
except KeyError as e:
logg.error('handler fail key {}'.format(e))
err = '400 Invalid JSON'
break
except ValueError as e:
logg.error('handler fail value {}'.format(e))
err = '400 Invalid data'
break
except RuntimeError as e:
logg.error('task fail value {}'.format(e))
err = '500 Task failed, sorry I cannot tell you more'
break
if r != None:
(mime_type, content) = r
break
session.close()
if err != None:
headers.append(('Content-Type', 'text/plain; charset=UTF-8',))
start_response(err, headers)
session.close()
return [content]
headers.append(('Content-Length', str(len(content))),)
headers.append(('Access-Control-Allow-Origin', '*',))
if len(content) == 0:
headers.append(('Content-Type', 'text/plain; charset=UTF-8',))
start_response('404 Looked everywhere, sorry', headers)
else:
headers.append(('Content-Type', mime_type,))
start_response('200 OK', headers)
return [content]
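For local testing without uwsgi, the application callable above can be served with the standard library; a minimal sketch:

    from wsgiref.simple_server import make_server

    # 'application' is the uwsgi entry point defined above
    httpd = make_server('127.0.0.1', 5000, application)
    httpd.serve_forever()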

View File

@@ -22,7 +22,6 @@ from chainlib.eth.connection import (
from chainlib.chain import ChainSpec
from chainqueue.db.models.otx import Otx
from cic_eth_registry.error import UnknownContractError
from cic_eth_registry.erc20 import ERC20Token
import liveness.linux
@@ -37,7 +36,6 @@ from cic_eth.eth import (
from cic_eth.admin import (
debug,
ctrl,
token,
)
from cic_eth.queue import (
query,
@@ -76,6 +74,7 @@ argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
argparser.add_argument('-r', type=str, help='CIC registry address')
argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use')
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to persist all queue entry status changes to storage')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
@@ -121,25 +120,20 @@ broker = config.get('CELERY_BROKER_URL')
if broker[:4] == 'file':
bq = tempfile.mkdtemp()
bp = tempfile.mkdtemp()
conf_update = {
current_app.conf.update({
'broker_url': broker,
'broker_transport_options': {
'data_folder_in': bq,
'data_folder_out': bq,
'data_folder_processed': bp,
},
}
if config.true('CELERY_DEBUG'):
conf_update['result_extended'] = True
current_app.conf.update(conf_update)
},
)
logg.warning('celery broker dirs queue i/o {} processed {}, will NOT be deleted on shutdown'.format(bq, bp))
else:
conf_update = {
'broker_url': broker,
}
if config.true('CELERY_DEBUG'):
conf_update['result_extended'] = True
current_app.conf.update(conf_update)
current_app.conf.update({
'broker_url': broker,
})
result = config.get('CELERY_RESULT_URL')
if result[:4] == 'file':
@@ -194,7 +188,6 @@ def main():
except UnknownContractError as e:
logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
sys.exit(1)
logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
if trusted_addresses_src == None:
@@ -209,11 +202,6 @@ def main():
    BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
    BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol)
    default_token = ERC20Token(chain_spec, rpc, BaseTask.default_token_address)
    default_token.load(rpc)
    BaseTask.default_token_decimals = default_token.decimals
    BaseTask.default_token_name = default_token.name
    BaseTask.run_dir = config.get('CIC_RUN_DIR')
    logg.info('default token set to {} {}'.format(BaseTask.default_token_symbol, BaseTask.default_token_address))
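For reference, the deduplicated broker setup earlier in this file boils down to building one conf_update dict and applying it once. A self-contained sketch, assuming only celery is installed; the filesystem transport and its data_folder options are standard Kombu settings, and the directory names are illustrative:

import tempfile

import celery

app = celery.Celery('sketch')

bq = tempfile.mkdtemp()  # queue i/o directory
bp = tempfile.mkdtemp()  # processed-message directory
conf_update = {
    'broker_url': 'filesystem://',
    'broker_transport_options': {
        'data_folder_in': bq,
        'data_folder_out': bq,
        'data_folder_processed': bp,
    },
}
# result_extended stores task name and args alongside each result, which
# is what the CELERY_DEBUG flag toggles in the hunk above
conf_update['result_extended'] = True
app.conf.update(conf_update)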

View File

@@ -15,7 +15,6 @@ import cic_base.config
import cic_base.log
import cic_base.argparse
import cic_base.rpc
from cic_base.eth.syncer import chain_interface
from cic_eth_registry.error import UnknownContractError
from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS
@@ -27,8 +26,10 @@ from hexathon import (
    strip_0x,
)
from chainsyncer.backend.sql import SQLBackend
from chainsyncer.driver.head import HeadSyncer
from chainsyncer.driver.history import HistorySyncer
from chainsyncer.driver import (
    HeadSyncer,
    HistorySyncer,
)
from chainsyncer.db.models.base import SessionBase
# local imports
@@ -50,23 +51,15 @@ from cic_eth.registry import (
script_dir = os.path.realpath(os.path.dirname(__file__))
def add_block_args(argparser):
    argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync')
    argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
    return argparser
logg = cic_base.log.create()
argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
#argparser = cic_base.argparse.add(argparser, add_traffic_args, 'traffic')
args = cic_base.argparse.parse(argparser, logg)
config = cic_base.config.create(args.c, args, args.env_prefix)
config.add(args.y, '_KEYSTORE_FILE', True)
config.add(args.q, '_CELERY_QUEUE', True)
config.add(args.history_start, 'SYNCER_HISTORY_START', True)
config.add(args.no_history, '_NO_HISTORY', True)
cic_base.config.log(config)
@@ -76,10 +69,9 @@ SessionBase.connect(dsn, pool_size=16, debug=config.true('DATABASE_DEBUG'))
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
#RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
def main():
    # connect to celery
    celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
@@ -97,7 +89,7 @@ def main():
    stat = init_chain_stat(rpc, block_start=block_current)
    loop_interval = stat.block_average()
    logg.debug('current block height {}'.format(block_offset))
    logg.debug('starting at block {}'.format(block_offset))
    syncers = []
@@ -106,13 +98,8 @@ def main():
    syncer_backends = SQLBackend.resume(chain_spec, block_offset)
    if len(syncer_backends) == 0:
        initial_block_start = config.get('SYNCER_HISTORY_START')
        initial_block_offset = block_offset
        if config.get('_NO_HISTORY'):
            initial_block_start = block_offset
            initial_block_offset += 1
        syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
        logg.info('found no backends to resume, adding initial sync from history start {} end {}'.format(initial_block_start, initial_block_offset))
        logg.info('found no backends to resume')
        syncer_backends.append(SQLBackend.initial(chain_spec, block_offset))
    else:
        for syncer_backend in syncer_backends:
            logg.info('resuming sync session {}'.format(syncer_backend))
@@ -121,11 +108,11 @@ def main():
    for syncer_backend in syncer_backends:
        try:
            syncers.append(HistorySyncer(syncer_backend, chain_interface))
            syncers.append(HistorySyncer(syncer_backend))
            logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend))
        except AttributeError:
            logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend))
            syncers.append(HeadSyncer(syncer_backend, chain_interface))
            syncers.append(HeadSyncer(syncer_backend))

    connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
@@ -168,6 +155,7 @@ def main():
        for cf in callback_filters:
            syncer.add_filter(cf)

        #r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc)
        r = syncer.loop(int(loop_interval), rpc)
        sys.stderr.write("sync {} done at block {}\n".format(syncer, r))
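Condensing the resume-or-initial logic above: one side of the hunk passes an extra chain_interface argument to the drivers. A sketch of the shared shape, assuming the chainsyncer.driver.head/history module layout shown on one side of the hunk:

from chainsyncer.backend.sql import SQLBackend
from chainsyncer.driver.head import HeadSyncer
from chainsyncer.driver.history import HistorySyncer

def build_syncers(chain_spec, block_offset, chain_interface):
    # resume any persisted sync sessions, or start a fresh one at the head
    backends = SQLBackend.resume(chain_spec, block_offset)
    if len(backends) == 0:
        backends.append(SQLBackend.initial(chain_spec, block_offset))
    syncers = []
    for backend in backends:
        try:
            # a backend holding a bounded historical range gets a history syncer
            syncers.append(HistorySyncer(backend, chain_interface))
        except AttributeError:
            # an unbounded backend only follows new chain heads
            syncers.append(HeadSyncer(backend, chain_interface))
    return syncers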

View File

@@ -12,10 +12,7 @@ import confini
import celery
# local imports
from cic_eth.api import (
    Api,
    AdminApi,
)
from cic_eth.api import Api
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
@@ -56,20 +53,13 @@ celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=confi
queue = args.q
api = Api(config.get('CIC_CHAIN_SPEC'), queue=queue)
admin_api = AdminApi(None)
def main():
    t = admin_api.registry()
    registry_address = t.get()
    print('Registry: {}'.format(registry_address))
    t = api.default_token()
    token_info = t.get()
    print('Default token symbol: {}'.format(token_info['symbol']))
    print('Default token address: {}'.format(token_info['address']))
    logg.debug('Default token name: {}'.format(token_info['name']))
    logg.debug('Default token decimals: {}'.format(token_info['decimals']))

if __name__ == '__main__':
    main()
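Both Api and AdminApi here hand back celery result handles, and a bare .get() blocks forever if no cic-eth worker is consuming the queue. A hedged sketch of a bounded wait, assuming standard celery AsyncResult semantics; the helper name and timeout are illustrative:

import sys

import celery.exceptions

def fetch_default_token(api, wait=10):
    # .get() blocks until a worker answers or the timeout expires
    try:
        return api.default_token().get(timeout=wait)
    except celery.exceptions.TimeoutError:
        sys.exit('no worker answered within {}s'.format(wait))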

View File

@@ -20,11 +20,7 @@ def init_chain_stat(rpc, block_start=0):
    if block_start == 0:
        o = block_latest()
        r = rpc.do(o)
        try:
            block_start = int(r, 16)
        except TypeError:
            block_start = int(r)
        logg.debug('blockstart {}'.format(block_start))
        block_start = int(r, 16)

    for i in range(BLOCK_SAMPLES):
        o = block_by_number(block_start-10+i)
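The try/except on one side of this hunk tolerates JSON-RPC backends that return the latest block number as a plain int rather than a hex string; int(r, 16) alone raises TypeError for ints. The same tolerance as a tiny helper, for illustration only:

def to_block_number(r):
    # JSON-RPC proper returns hex strings like '0x10'; some test backends
    # hand back ints directly, where int(r, 16) raises TypeError
    try:
        return int(r, 16)
    except TypeError:
        return int(r)

assert to_block_number('0x10') == 16
assert to_block_number(16) == 16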

View File

@@ -20,8 +20,7 @@ import liveness.linux
from cic_eth.error import SeppukuError
from cic_eth.db.models.base import SessionBase
#logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger()
logg = logging.getLogger().getChild(__name__)
celery_app = celery.current_app
@@ -34,8 +33,6 @@ class BaseTask(celery.Task):
    create_gas_oracle = RPCGasOracle
    default_token_address = None
    default_token_symbol = None
    default_token_name = None
    default_token_decimals = None
    run_dir = '/run'

    def create_session(self):
@@ -119,13 +116,12 @@ def registry():
    return CICRegistry.address

@celery_app.task(bind=True, base=BaseTask)
def registry_address_lookup(self, chain_spec_dict, address, connection_tag='default'):
@celery_app.task()
def registry_address_lookup(chain_spec_dict, address, connection_tag='default'):
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    conn = RPCConnection.connect(chain_spec, tag=connection_tag)
    registry = CICRegistry(chain_spec, conn)
    r = registry.by_address(address, sender_address=self.call_address)
    return r
    return registry.by_address(address)
@celery_app.task(throws=(UnknownContractError,))
@@ -133,7 +129,7 @@ def registry_name_lookup(chain_spec_dict, name, connection_tag='default'):
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    conn = RPCConnection.connect(chain_spec, tag=connection_tag)
    registry = CICRegistry(chain_spec, conn)
    return registry.by_name(name, sender_address=self.call_address)
    return registry.by_name(name)
@celery_app.task()
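The bound variant of registry_address_lookup receives the task instance as self, so it can read BaseTask attributes such as call_address for the read-only registry call. Dispatching it from a client would look roughly like this; the full task path, queue name, and chain string are assumptions for illustration:

import celery
from chainlib.chain import ChainSpec

celery_app = celery.Celery(broker='redis://', backend='redis://')

chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')  # illustrative
address = '0x' + '00' * 20  # illustrative contract address

s = celery.signature(
    'cic_eth.eth.registry_address_lookup',  # hypothetical task path
    [chain_spec.asdict(), address],
    queue='cic-eth',
)
t = s.apply_async()
print(t.get())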

View File

@@ -9,8 +9,8 @@ import semver
version = (
    0,
    11,
    1,
    'alpha.3',
    0,
    'beta.11',
)
version_object = semver.VersionInfo(
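The tuple above feeds semver.VersionInfo, whose string form carries the pre-release tag; a quick sketch using the 'alpha.3' side of the hunk (the exact composition in version.py is cut off by the hunk):

import semver

version = (0, 11, 1, 'alpha.3')
version_object = semver.VersionInfo(
    major=version[0],
    minor=version[1],
    patch=version[2],
    prerelease=version[3],
)
print(str(version_object))  # 0.11.1-alpha.3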

View File

@@ -1,4 +1,3 @@
[celery]
broker_url = redis://
result_url = redis://
debug = 0

View File

@@ -1,4 +1,3 @@
[celery]
broker_url = redis://localhost:63379
result_url = redis://localhost:63379
debug = 0

View File

@@ -1,3 +1,2 @@
[SYNCER]
loop_interval =
history_start = 0
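These ini files are read through confini, which flattens [section] key pairs into SECTION_KEY lookups (so [celery] broker_url becomes the CELERY_BROKER_URL seen earlier). A minimal read sketch; the config directory path is an assumption:

import confini

config = confini.Config('/usr/local/etc/cic')  # hypothetical config dir
config.process()
print(config.get('CELERY_BROKER_URL'))
print(config.get('SYNCER_LOOP_INTERVAL'))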

Some files were not shown because too many files have changed in this diff.