cic cache build

Blair Vanderlugt 2021-02-18 05:04:30 +00:00
parent 725ef54cf5
commit b26a14e8ca
66 changed files with 5774 additions and 8 deletions


@ -5,6 +5,7 @@ include:
  - local: 'apps/cic-ussd/.gitlab-ci.yml'
  - local: 'apps/cic-notify/.gitlab-ci.yml'
  - local: 'apps/cic-meta/.gitlab-ci.yml'
  - local: 'apps/cic-cache/.gitlab-ci.yml'
stages:
  - build

.gitmodules vendored

@ -1,3 +0,0 @@
[submodule "apps/cic-cache"]
path = apps/cic-cache
url = git@gitlab.com:grassrootseconomics/cic-cache.git

@ -1 +0,0 @@
Subproject commit d2cb3a45558d7ca3a412c97c6aea794d9ac6c6f5


@ -0,0 +1,2 @@
[bancor]
dir =


@ -0,0 +1,2 @@
[cic]
registry_address =


@ -0,0 +1,8 @@
[database]
NAME=cic-eth
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2


@ -0,0 +1,6 @@
[eth]
provider = ws://localhost:8545
#http_provider = http://localhost:8545
#provider = http://localhost:8545
gas_provider_address =
#chain_id =


@ -0,0 +1,2 @@
[bancor]
dir =


@ -0,0 +1,2 @@
[cic]
registry_address =


@ -0,0 +1,8 @@
[database]
NAME=cic-cache-test
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=sqlite
DRIVER=pysqlite


@ -0,0 +1,5 @@
[eth]
#ws_provider = ws://localhost:8546
#http_provider = http://localhost:8545
provider = http://localhost:8545
#chain_id =


@ -0,0 +1,5 @@
[report]
omit =
.venv/*
scripts/*
cic_cache/db/postgres/*


@ -0,0 +1,7 @@
set -a
CICTEST_DATABASE_ENGINE=postgresql
CICTEST_DATABASE_DRIVER=psycopg2
CICTEST_DATABASE_HOST=localhost
CICTEST_DATABASE_PORT=5432
CICTEST_DATABASE_NAME=cic-eth-test
set +a

apps/cic-cache/.gitignore vendored Normal file

@ -0,0 +1,8 @@
.envrc
.envrc_dev
.venv
__pycache__
*.pyc
_build
doc/**/*.png
doc/**/html


@ -0,0 +1,22 @@
.cic_cache_variables:
variables:
APP_NAME: cic-cache
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
.cic_cache_changes_target:
rules:
- changes:
- $CONTEXT/$APP_NAME/*
build-mr-cic-cache:
extends:
- .cic_cache_changes_target
- .py_build_merge_request
- .cic_cache_variables
build-push-cic-cache:
extends:
- .py_build_push
- .cic_cache_variables

apps/cic-cache/CHANGELOG Normal file

@ -0,0 +1,13 @@
- 0.1.2
* Revert to alembic migrations
- 0.1.1
* Add missing modules to setup
- 0.1.0
* Remove old APIs
* Add bloom filter output APIs for all txs and per-account txs
- 0.0.2
* UWSGI server endpoint example
* OpenAPI spec
* stored procedures, test fixture for database schema
- 0.0.1
* Add json translators of transaction_list and balances stored procedure queries

apps/cic-cache/README.md Normal file


@ -0,0 +1 @@
from .cache import BloomCache


@ -0,0 +1,73 @@
"""API for cic-cache celery tasks
.. moduleauthor:: Louis Holbrook <dev@holbrook.no>
"""
# standard imports
import logging
# third-party imports
import celery
app = celery.current_app
logg = logging.getLogger(__name__)
class Api:
"""Creates task chains to perform well-known CIC operations.
Each method that sends tasks returns details about the root task. The root task uuid can be provided in the callback, to enable the caller to correlate the result with individual calls. It can also be used to independently poll the completion of a task chain.
:param callback_param: Static value to pass to callback
:type callback_param: str
:param callback_task: Callback task that is called with callback_param. (Must be registered with the celery worker)
:type callback_task: str
:param queue: Name of worker queue to submit tasks to
:type queue: str
"""
def __init__(self, queue='cic-cache', callback_param=None, callback_task='cic_cache.callbacks.noop.noop', callback_queue=None):
self.callback_param = callback_param
self.callback_task = callback_task
self.queue = queue
logg.info('api using queue {}'.format(self.queue))
self.callback_success = None
self.callback_error = None
if callback_queue == None:
callback_queue=self.queue
if callback_param != None:
self.callback_success = celery.signature(
callback_task,
[
callback_param,
0,
],
queue=callback_queue,
)
self.callback_error = celery.signature(
callback_task,
[
callback_param,
1,
],
queue=callback_queue,
)
    def list(self, offset, limit, address=None):
        s = celery.signature(
            'cic_cache.tasks.tx.tx_filter',
            [
                offset,
                limit,
                address,
            ],
            queue=self.queue,
            )
if self.callback_param != None:
s.link(self.callback_success).on_error(self.callback_error)
t = s.apply_async()
return t
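
A minimal usage sketch of the Api class above (not part of this commit), assuming a celery worker from cic_cache/runnable/tasker.py is consuming the 'cic-cache' queue; it mirrors the call exercised in tests/test_task.py:

# minimal sketch, assuming a worker is listening on the 'cic-cache' queue
from cic_cache.api import Api

api = Api(queue='cic-cache')
t = api.list(0, 100)   # root celery AsyncResult of the task chain
r = t.get()            # payload produced by cic_cache.tasks.tx.tx_filter
print(r['low'], r['high'])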


@ -0,0 +1,89 @@
# standard imports
import logging
# third-party imports
import moolb
# local imports
from cic_cache.db import list_transactions_mined
from cic_cache.db import list_transactions_account_mined
logg = logging.getLogger()
class BloomCache:
def __init__(self, session):
self.session = session
@staticmethod
def __get_filter_size(n):
n = 8192 * 8
logg.warning('filter size hardcoded to {}'.format(n))
return n
def load_transactions(self, offset, limit):
"""Retrieves a list of transactions from cache and creates a bloom filter pointing to blocks and transactions.
Block and transaction numbers are serialized as 32-bit big-endian numbers. The input to the second bloom filter is the concatenation of the serialized block number and transaction index.
        For example, if the block number is 13 and the transaction index is 42, the inputs are:
        block filter: 0x0000000d
        block+tx filter: 0x0000000d0000002a
:param offset: Offset in data set to return transactions from
:type offset: int
:param limit: Max number of transactions to retrieve
:type limit: int
:return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
:rtype: tuple
"""
rows = list_transactions_mined(self.session, offset, limit)
f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
highest_block = -1
lowest_block = -1
for r in rows:
if highest_block == -1:
highest_block = r[0]
lowest_block = r[0]
block = r[0].to_bytes(4, byteorder='big')
tx = r[1].to_bytes(4, byteorder='big')
f_block.add(block)
f_blocktx.add(block + tx)
logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
def load_transactions_account(self, address, offset, limit):
"""Same as load_transactions(...), but only retrieves transactions where the specified account address is sender or recipient.
:param address: Address to retrieve transactions for.
:type address: str, 0x-hex
:param offset: Offset in data set to return transactions from
:type offset: int
:param limit: Max number of transactions to retrieve
:type limit: int
:return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
:rtype: tuple
"""
rows = list_transactions_account_mined(self.session, address, offset, limit)
f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        highest_block = -1
        lowest_block = -1
for r in rows:
if highest_block == -1:
highest_block = r[0]
lowest_block = r[0]
block = r[0].to_bytes(4, byteorder='big')
tx = r[1].to_bytes(4, byteorder='big')
f_block.add(block)
f_blocktx.add(block + tx)
logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
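
A standard-library sketch of the key encoding the docstrings above describe; the actual membership test would go through a bloom filter implementation such as moolb:

def blocktx_key(block_number, tx_index):
    # 32-bit big-endian block number concatenated with 32-bit tx index
    return block_number.to_bytes(4, byteorder='big') + tx_index.to_bytes(4, byteorder='big')

# matches the docstring example: block 13, tx index 42
assert blocktx_key(13, 42).hex() == '0000000d0000002a'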


@ -0,0 +1,35 @@
# standard imports
import logging
# local imports
from .list import list_transactions_mined
from .list import list_transactions_account_mined
from .list import add_transaction
logg = logging.getLogger()
def dsn_from_config(config):
scheme = config.get('DATABASE_ENGINE')
if config.get('DATABASE_DRIVER') != None:
scheme += '+{}'.format(config.get('DATABASE_DRIVER'))
dsn = ''
if config.get('DATABASE_ENGINE') == 'sqlite':
dsn = '{}:///{}'.format(
scheme,
config.get('DATABASE_NAME'),
)
else:
dsn = '{}://{}:{}@{}:{}/{}'.format(
scheme,
config.get('DATABASE_USER'),
config.get('DATABASE_PASSWORD'),
config.get('DATABASE_HOST'),
config.get('DATABASE_PORT'),
config.get('DATABASE_NAME'),
)
logg.debug('parsed dsn from config: {}'.format(dsn))
return dsn
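
For illustration, the DSNs this function yields for the two bundled config flavors; StubConfig is a hypothetical stand-in for confini.Config that only implements get():

from cic_cache.db import dsn_from_config

class StubConfig:
    # hypothetical minimal stand-in for confini.Config
    def __init__(self, d):
        self.d = d
    def get(self, k):
        return self.d.get(k)

pg = StubConfig({
    'DATABASE_ENGINE': 'postgresql', 'DATABASE_DRIVER': 'psycopg2',
    'DATABASE_USER': 'postgres', 'DATABASE_PASSWORD': '',
    'DATABASE_HOST': 'localhost', 'DATABASE_PORT': '5432', 'DATABASE_NAME': 'cic-cache',
    })
assert dsn_from_config(pg) == 'postgresql+psycopg2://postgres:@localhost:5432/cic-cache'

lite = StubConfig({'DATABASE_ENGINE': 'sqlite', 'DATABASE_DRIVER': 'pysqlite', 'DATABASE_NAME': 'cic-cache-test'})
assert dsn_from_config(lite) == 'sqlite+pysqlite:///cic-cache-test'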


@ -0,0 +1,79 @@
# standard imports
import logging
import datetime
# third-party imports
from cic_cache.db.models.base import SessionBase
logg = logging.getLogger()
def list_transactions_mined(
session,
offset,
limit,
):
"""Executes db query to return all confirmed transactions according to the specified offset and limit.
:param offset: Offset in data set to return transactions from
:type offset: int
:param limit: Max number of transactions to retrieve
:type limit: int
    :return: Result set
:rtype: SQLAlchemy.ResultProxy
"""
s = "SELECT block_number, tx_index FROM tx ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(limit, offset)
r = session.execute(s)
return r
def list_transactions_account_mined(
session,
address,
offset,
limit,
):
"""Same as list_transactions_mined(...), but only retrieves transaction where the specified account address is sender or recipient.
:param address: Address to retrieve transactions for.
:type address: str, 0x-hex
:param offset: Offset in data set to return transactions from
:type offset: int
:param limit: Max number of transactions to retrieve
:type limit: int
    :return: Result set
:rtype: SQLAlchemy.ResultProxy
"""
s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(address, address, limit, offset)
r = session.execute(s)
return r
def add_transaction(
session, tx_hash,
block_number,
tx_index,
sender,
receiver,
source_token,
destination_token,
from_value,
to_value,
success,
timestamp,
):
date_block = datetime.datetime.fromtimestamp(timestamp)
s = "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, source_token, destination_token, from_value, to_value, success, date_block) VALUES ('{}', {}, {}, '{}', '{}', '{}', '{}', {}, {}, {}, '{}')".format(
tx_hash,
block_number,
tx_index,
sender,
receiver,
source_token,
destination_token,
from_value,
to_value,
success,
date_block,
)
session.execute(s)
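
The queries above interpolate values directly into the SQL string; as a hedged alternative sketch (not the committed code), the same list query with bound parameters would close off injection through the address argument:

from sqlalchemy import text

def list_transactions_mined_bound(session, offset, limit):
    # identical query to list_transactions_mined, but with bound parameters
    s = text(
        "SELECT block_number, tx_index FROM tx "
        "ORDER BY block_number DESC, tx_index DESC "
        "LIMIT :limit OFFSET :offset"
        )
    return session.execute(s, {'limit': limit, 'offset': offset})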


@ -0,0 +1 @@
Generic single-database configuration.


@ -0,0 +1,86 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = .
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to ./versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat ./versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
#sqlalchemy.url = driver://user:pass@localhost/dbname
sqlalchemy.url = postgresql+psycopg2://postgres@localhost:5432/cic-cache
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S


@ -0,0 +1,77 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()


@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}


@ -0,0 +1,52 @@
"""Base tables
Revision ID: 63b629f14a85
Revises:
Create Date: 2020-12-04 08:16:00.412189
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '63b629f14a85'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'tx',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('date_registered', sa.DateTime, nullable=False, server_default=sa.func.current_timestamp()),
sa.Column('block_number', sa.Integer, nullable=False),
sa.Column('tx_index', sa.Integer, nullable=False),
sa.Column('tx_hash', sa.String(66), nullable=False),
sa.Column('sender', sa.String(42), nullable=False),
sa.Column('recipient', sa.String(42), nullable=False),
sa.Column('source_token', sa.String(42), nullable=False),
sa.Column('destination_token', sa.String(42), nullable=False),
sa.Column('success', sa.Boolean, nullable=False),
sa.Column('from_value', sa.BIGINT(), nullable=False),
sa.Column('to_value', sa.BIGINT(), nullable=False),
sa.Column('date_block', sa.DateTime, nullable=False),
)
op.create_table(
'tx_sync',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('tx', sa.String(66), nullable=False),
)
op.execute("INSERT INTO tx_sync (tx) VALUES('0x0000000000000000000000000000000000000000000000000000000000000000');")
op.create_index('sender_token_idx', 'tx', ['sender', 'source_token'])
op.create_index('recipient_token_idx', 'tx', ['recipient', 'destination_token'])
def downgrade():
op.drop_index('recipient_token_idx')
op.drop_index('sender_token_idx')
op.drop_table('tx_sync')
op.drop_table('tx')


@ -0,0 +1,102 @@
# standard imports
import logging
# third-party imports
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
logg = logging.getLogger()
Model = declarative_base(name='Model')
class SessionBase(Model):
"""The base object for all SQLAlchemy enabled models. All other models must extend this.
"""
__abstract__ = True
id = Column(Integer, primary_key=True)
engine = None
"""Database connection engine of the running aplication"""
sessionmaker = None
"""Factory object responsible for creating sessions from the connection pool"""
transactional = True
"""Whether the database backend supports query transactions. Should be explicitly set by initialization code"""
poolable = True
"""Whether the database backend supports connection pools. Should be explicitly set by initialization code"""
procedural = True
"""Whether the database backend supports stored procedures"""
localsessions = {}
"""Contains dictionary of sessions initiated by db model components"""
@staticmethod
def create_session():
"""Creates a new database session.
"""
return SessionBase.sessionmaker()
@staticmethod
def _set_engine(engine):
"""Sets the database engine static property
"""
SessionBase.engine = engine
SessionBase.sessionmaker = sessionmaker(bind=SessionBase.engine)
@staticmethod
def connect(dsn, debug=False):
"""Create new database connection engine and connect to database backend.
:param dsn: DSN string defining connection.
:type dsn: str
"""
e = None
if SessionBase.poolable:
e = create_engine(
dsn,
max_overflow=50,
pool_pre_ping=True,
pool_size=20,
pool_recycle=10,
echo=debug,
)
else:
e = create_engine(
dsn,
echo=debug,
)
SessionBase._set_engine(e)
@staticmethod
def disconnect():
"""Disconnect from database and free resources.
"""
SessionBase.engine.dispose()
SessionBase.engine = None
@staticmethod
def bind_session(session=None):
localsession = session
if localsession == None:
localsession = SessionBase.create_session()
localsession_key = str(id(localsession))
logg.debug('creating new session {}'.format(localsession_key))
SessionBase.localsessions[localsession_key] = localsession
return localsession
@staticmethod
def release_session(session=None):
session_key = str(id(session))
if SessionBase.localsessions.get(session_key) != None:
logg.debug('destroying session {}'.format(session_key))
session.commit()
session.close()
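
A minimal wiring sketch mirroring what the runnable scripts and test fixtures do with this class: connect once with a DSN, then hand out sessions from the factory:

from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase

# in practice the dsn comes from dsn_from_config(config)
SessionBase.connect('postgresql+psycopg2://postgres:@localhost:5432/cic-cache')
session = SessionBase.create_session()
try:
    session.execute('SELECT 1')  # sanity query
    session.commit()
finally:
    session.close()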


@ -0,0 +1,141 @@
# standard imports
import os
import re
import logging
import argparse
import json
import base64
# third-party imports
import confini
# local imports
from cic_cache import BloomCache
from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')
config_dir = os.path.join('/usr/local/etc/cic-cache')
argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()
if args.vv:
logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
logging.getLogger().setLevel(logging.INFO)
config = confini.Config(args.c, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))
dsn = dsn_from_config(config)
SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'
DEFAULT_LIMIT = 100
def process_transactions_account_bloom(session, env):
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
if not r:
return None
address = r[1]
if r[2] == None:
address = '0x' + address
    # defaults: start at offset 0, return at most DEFAULT_LIMIT transactions
    offset = 0
    if r.lastindex > 2:
        offset = int(r[3])
    limit = DEFAULT_LIMIT
    if r.lastindex > 3:
        limit = int(r[4])
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
'filter_rounds': 3,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
def process_transactions_all_bloom(session, env):
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
if not r:
return None
    # defaults: start at offset 0, return at most DEFAULT_LIMIT transactions
    offset = 0
    if r.lastindex > 0:
        offset = int(r[1])
    limit = DEFAULT_LIMIT
    if r.lastindex > 1:
        limit = int(r[2])
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
'filter_rounds': 3,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
# uwsgi application
def application(env, start_response):
headers = []
content = b''
session = SessionBase.create_session()
for handler in [
process_transactions_all_bloom,
process_transactions_account_bloom,
]:
r = handler(session, env)
if r != None:
(mime_type, content) = r
break
session.close()
headers.append(('Content-Length', str(len(content))),)
    headers.append(('Access-Control-Allow-Origin', '*',))
if len(content) == 0:
        headers.append(('Content-Type', 'text/plain; charset=UTF-8',))
start_response('404 Looked everywhere, sorry', headers)
else:
headers.append(('Content-Type', mime_type,))
start_response('200 OK', headers)
return [content]
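
The Dockerfile below shows the intended uwsgi entrypoint; for a quick local smoke test the same WSGI callable can also be served with the standard library (a sketch, assuming config and database are reachable; port 5555 matches the javascript example further down):

from wsgiref.simple_server import make_server
from cic_cache.runnable.server import application

# serves e.g. http://localhost:5555/tx/0/100
make_server('localhost', 5555, application).serve_forever()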


@ -0,0 +1,98 @@
# standard imports
import logging
import os
import sys
import argparse
import tempfile
# third-party imports
import celery
import confini
# local imports
from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase
from cic_cache.tasks.tx import *
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
config_dir = os.path.join('/usr/local/etc/cic-cache')
argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('-q', type=str, default='cic-cache', help='queue name for worker tasks')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()
if args.vv:
logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
logging.getLogger().setLevel(logging.INFO)
config = confini.Config(args.c, args.env_prefix)
config.process()
# connect to database
dsn = dsn_from_config(config)
SessionBase.connect(dsn)
# verify database connection with minimal sanity query
#session = SessionBase.create_session()
#session.execute('select version_num from alembic_version')
#session.close()
# set up celery
current_app = celery.Celery(__name__)
broker = config.get('CELERY_BROKER_URL')
if broker[:4] == 'file':
bq = tempfile.mkdtemp()
bp = tempfile.mkdtemp()
current_app.conf.update({
'broker_url': broker,
'broker_transport_options': {
'data_folder_in': bq,
'data_folder_out': bq,
'data_folder_processed': bp,
},
},
)
logg.warning('celery broker dirs queue i/o {} processed {}, will NOT be deleted on shutdown'.format(bq, bp))
else:
current_app.conf.update({
'broker_url': broker,
})
result = config.get('CELERY_RESULT_URL')
if result[:4] == 'file':
rq = tempfile.mkdtemp()
current_app.conf.update({
'result_backend': 'file://{}'.format(rq),
})
logg.warning('celery backend store dir {} created, will NOT be deleted on shutdown'.format(rq))
else:
current_app.conf.update({
'result_backend': result,
})
def main():
argv = ['worker']
if args.vv:
argv.append('--loglevel=DEBUG')
elif args.v:
argv.append('--loglevel=INFO')
argv.append('-Q')
argv.append(args.q)
argv.append('-n')
argv.append(args.q)
current_app.worker_main(argv)
if __name__ == '__main__':
main()


@ -0,0 +1,339 @@
# standard imports
import sys
import os
import argparse
import logging
import time
import enum
import re
# third-party imports
import confini
from cic_registry import CICRegistry
from cic_registry.chain import (
ChainRegistry,
ChainSpec,
)
#from cic_registry.bancor import BancorRegistryClient
from cic_registry.token import Token
from cic_registry.error import (
UnknownContractError,
UnknownDeclarationError,
)
from cic_registry.declaration import to_token_declaration
from web3.exceptions import BlockNotFound, TransactionNotFound
from websockets.exceptions import ConnectionClosedError
from requests.exceptions import ConnectionError
import web3
from web3 import HTTPProvider, WebsocketProvider
# local imports
from cic_cache import db
from cic_cache.db.models.base import SessionBase
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
logging.getLogger('urllib3').setLevel(logging.CRITICAL)
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)
log_topics = {
'transfer': '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
'convert': '0x7154b38b5dd31bb3122436a96d4e09aba5b323ae1fd580025fab55074334c095',
    'accountregistry_add': '0x0a3b0a4f4c6e53dce3dbcad5614cb2ba3a0fa7326d03c5d64b4fa2d565492737',
}
config_dir = os.path.join('/usr/local/etc/cic-cache')
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
argparser.add_argument('--trust-address', default=[], type=str, dest='trust_address', action='append', help='Set address as trust')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
args = argparser.parse_args(sys.argv[1:])
config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
if args.v == True:
logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
logging.getLogger().setLevel(logging.DEBUG)
config = confini.Config(config_dir, args.env_prefix)
config.process()
args_override = {
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
'CIC_TRUST_ADDRESS': ",".join(getattr(args, 'trust_address', [])),
}
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
# connect to database
dsn = db.dsn_from_config(config)
SessionBase.connect(dsn)
re_websocket = re.compile('^wss?://')
re_http = re.compile('^https?://')
blockchain_provider = config.get('ETH_PROVIDER')
if re.match(re_websocket, blockchain_provider) != None:
blockchain_provider = WebsocketProvider(blockchain_provider)
elif re.match(re_http, blockchain_provider) != None:
blockchain_provider = HTTPProvider(blockchain_provider)
else:
raise ValueError('unknown provider url {}'.format(blockchain_provider))
def web3_constructor():
w3 = web3.Web3(blockchain_provider)
return (blockchain_provider, w3)
class RunStateEnum(enum.IntEnum):
INIT = 0
RUN = 1
TERMINATE = 9
def rubberstamp(src):
return True
class Tracker:
def __init__(self, chain_spec, trusts=[]):
self.block_height = 0
self.tx_height = 0
self.state = RunStateEnum.INIT
self.declarator_cache = {}
self.convert_enabled = False
self.trusts = trusts
self.chain_spec = chain_spec
self.declarator = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', 'Declarator')
def __process_tx(self, w3, session, t, r, l, b):
token_value = int(l.data, 16)
token_sender = l.topics[1][-20:].hex()
token_recipient = l.topics[2][-20:].hex()
#ts = ContractRegistry.get_address(t.address)
ts = CICRegistry.get_address(self.chain_spec, t.address())
logg.info('add token transfer {} value {} from {} to {}'.format(
ts.symbol(),
token_value,
token_sender,
token_recipient,
)
)
db.add_transaction(
session,
r.transactionHash.hex(),
r.blockNumber,
r.transactionIndex,
w3.toChecksumAddress(token_sender),
w3.toChecksumAddress(token_recipient),
t.address(),
t.address(),
token_value,
token_value,
r.status == 1,
b.timestamp,
)
session.flush()
# TODO: simplify/ split up and/or comment, function is too long
def __process_convert(self, w3, session, t, r, l, b):
logg.warning('conversions are deactivated')
return
# token_source = l.topics[2][-20:].hex()
# token_source = w3.toChecksumAddress(token_source)
# token_destination = l.topics[3][-20:].hex()
# token_destination = w3.toChecksumAddress(token_destination)
# data_noox = l.data[2:]
# d = data_noox[:64]
# token_from_value = int(d, 16)
# d = data_noox[64:128]
# token_to_value = int(d, 16)
# token_trader = '0x' + data_noox[192-40:]
#
# #ts = ContractRegistry.get_address(token_source)
# ts = CICRegistry.get_address(CICRegistry.bancor_chain_spec, t.address())
# #if ts == None:
# # ts = ContractRegistry.reserves[token_source]
# td = ContractRegistry.get_address(token_destination)
# #if td == None:
# # td = ContractRegistry.reserves[token_source]
# logg.info('add token convert {} -> {} value {} -> {} trader {}'.format(
# ts.symbol(),
# td.symbol(),
# token_from_value,
# token_to_value,
# token_trader,
# )
# )
#
# db.add_transaction(
# session,
# r.transactionHash.hex(),
# r.blockNumber,
# r.transactionIndex,
# w3.toChecksumAddress(token_trader),
# w3.toChecksumAddress(token_trader),
# token_source,
# token_destination,
# r.status == 1,
# b.timestamp,
# )
# session.flush()
def check_token(self, address):
t = None
try:
t = CICRegistry.get_address(CICRegistry.default_chain_spec, address)
return t
except UnknownContractError:
logg.debug('contract {} not in registry'.format(address))
# If nothing was returned, we look up the token in the declarator
for trust in self.trusts:
logg.debug('look up declaration for contract {} with trust {}'.format(address, trust))
fn = self.declarator.function('declaration')
# TODO: cache trust in LRUcache
declaration_array = fn(trust, address).call()
try:
declaration = to_token_declaration(trust, address, declaration_array, [rubberstamp])
logg.debug('found declaration for token {} from trust address {}'.format(address, trust))
except UnknownDeclarationError:
continue
try:
c = w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=address)
t = CICRegistry.add_token(self.chain_spec, c)
break
except ValueError:
logg.error('declaration for {} validates as token, but location is not ERC20 compatible'.format(address))
return t
# TODO use input data instead of logs
def process(self, w3, session, block):
#self.refresh_registry(w3)
tx_count = w3.eth.getBlockTransactionCount(block.hash)
b = w3.eth.getBlock(block.hash)
for i in range(self.tx_height, tx_count):
tx = w3.eth.getTransactionByBlock(block.hash, i)
if tx.to == None:
logg.debug('block {} tx {} is contract creation tx, skipping'.format(block.number, i))
continue
if len(w3.eth.getCode(tx.to)) == 0:
logg.debug('block {} tx {} not a contract tx, skipping'.format(block.number, i))
continue
t = self.check_token(tx.to)
if t != None and isinstance(t, Token):
r = w3.eth.getTransactionReceipt(tx.hash)
for l in r.logs:
logg.debug('block {} tx {} {} token log {} {}'.format(block.number, i, tx.hash.hex(), l.logIndex, l.topics[0].hex()))
if l.topics[0].hex() == log_topics['transfer']:
self.__process_tx(w3, session, t, r, l, b)
# TODO: cache contracts in LRUcache
elif self.convert_enabled and tx.to == CICRegistry.get_contract(CICRegistry.default_chain_spec, 'Converter').address:
r = w3.eth.getTransactionReceipt(tx.hash)
for l in r.logs:
logg.info('block {} tx {} {} bancornetwork log {} {}'.format(block.number, i, tx.hash.hex(), l.logIndex, l.topics[0].hex()))
if l.topics[0].hex() == log_topics['convert']:
self.__process_convert(w3, session, t, r, l, b)
session.execute("UPDATE tx_sync SET tx = '{}'".format(tx.hash.hex()))
session.commit()
self.tx_height += 1
def __get_next_retry(self, backoff=False):
return 1
def loop(self):
logg.info('starting at block {} tx index {}'.format(self.block_height, self.tx_height))
self.state = RunStateEnum.RUN
while self.state == RunStateEnum.RUN:
(provider, w3) = web3_constructor()
session = SessionBase.create_session()
try:
block = w3.eth.getBlock(self.block_height)
self.process(w3, session, block)
self.block_height += 1
self.tx_height = 0
except BlockNotFound as e:
logg.debug('no block {} yet, zZzZ...'.format(self.block_height))
time.sleep(self.__get_next_retry())
except ConnectionClosedError as e:
logg.info('connection gone, retrying')
time.sleep(self.__get_next_retry(True))
except OSError as e:
logg.error('cannot connect {}'.format(e))
time.sleep(self.__get_next_retry(True))
except Exception as e:
session.close()
                raise
session.close()
def load(self, w3):
session = SessionBase.create_session()
r = session.execute('SELECT tx FROM tx_sync').first()
if r != None:
if r[0] == '0x{0:0{1}X}'.format(0, 64):
logg.debug('last tx was zero-address, starting from scratch')
return
t = w3.eth.getTransaction(r[0])
self.block_height = t.blockNumber
self.tx_height = t.transactionIndex+1
c = w3.eth.getBlockTransactionCount(t.blockHash.hex())
logg.debug('last tx processed {} index {} (max index {})'.format(t.blockNumber, t.transactionIndex, c-1))
if c == self.tx_height:
self.block_height += 1
self.tx_height = 0
session.close()
(provider, w3) = web3_constructor()
trust = config.get('CIC_TRUST_ADDRESS', '').split(',')
chain_spec = args.i
try:
w3.eth.chainId
except Exception as e:
logg.exception(e)
sys.stderr.write('cannot connect to evm node\n')
sys.exit(1)
def main():
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
CICRegistry.init(w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
chain_registry = ChainRegistry(chain_spec)
CICRegistry.add_chain_registry(chain_registry)
t = Tracker(chain_spec, trust)
t.load(w3)
t.loop()
if __name__ == '__main__':
main()
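
A side note on the log_topics table near the top of this file: the 'transfer' entry is the standard ERC20 Transfer event topic, i.e. the keccak256 hash of the event signature, which can be verified with web3 (a sketch, not part of the tracker):

import web3

assert web3.Web3.keccak(text='Transfer(address,address,uint256)').hex() == \
        '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef'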


@ -0,0 +1,38 @@
# third-party imports
import celery
# local imports
from cic_cache.cache import BloomCache
from cic_cache.db.models.base import SessionBase
celery_app = celery.current_app
@celery_app.task(bind=True)
def tx_filter(self, offset, limit, address=None, encoding='hex'):
queue = self.request.delivery_info.get('routing_key')
session = SessionBase.create_session()
c = BloomCache(session)
b = None
if address == None:
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
else:
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
session.close()
o = {
'alg': 'sha256',
'low': lowest_block,
'high': highest_block,
'block_filter': bloom_filter_block.hex(),
'blocktx_filter': bloom_filter_tx.hex(),
'filter_rounds': 3,
}
return o


@ -0,0 +1,18 @@
import os
import semver
version = (
0,
2,
0,
'alpha.1',
)
version_object = semver.VersionInfo(
major=version[0],
minor=version[1],
patch=version[2],
prerelease=version[3],
)
version_string = str(version_object)


@ -0,0 +1,2 @@
[bancor]
dir =


@ -0,0 +1,3 @@
[celery]
broker_url = redis:///
result_url = redis:///


@ -0,0 +1,4 @@
[cic]
registry_address =
chain_spec =
trust_address =


@ -0,0 +1,9 @@
[database]
NAME=cic-eth
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=


@ -0,0 +1,3 @@
[bancor]
registry_address =
dir = /usr/local/share/bancor


@ -0,0 +1,3 @@
[celery]
broker_url = redis://localhost:63379
result_url = redis://localhost:63379


@ -0,0 +1,9 @@
[database]
NAME=cic_cache
USER=grassroots
PASSWORD=
HOST=localhost
PORT=63432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=1


@ -0,0 +1,3 @@
[eth]
provider = ws://localhost:63546
chain_id = 8996


@ -0,0 +1,7 @@
[eth]
provider = ws://localhost:8545
#http_provider = http://localhost:8545
#provider = http://localhost:8545
gas_provider_address =
#chain_id =
abi_dir = /usr/local/share/cic/solidity/abi


@ -0,0 +1,2 @@
[bancor]
dir =


@ -0,0 +1,2 @@
[cic]
registry_address =


@ -0,0 +1,9 @@
[database]
NAME=cic-cache-test
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=sqlite
DRIVER=pysqlite
DEBUG=


@ -0,0 +1,5 @@
[eth]
#ws_provider = ws://localhost:8546
#http_provider = http://localhost:8545
provider = http://localhost:8545
#chain_id =


@ -0,0 +1,5 @@
CREATE DATABASE "cic-cache";
CREATE DATABASE "cic-eth";
CREATE DATABASE "cic-notify";
CREATE DATABASE "cic-meta";
CREATE DATABASE "cic-signer";


@ -0,0 +1,22 @@
CREATE TABLE tx (
id SERIAL PRIMARY KEY,
date_registered TIMESTAMP NOT NULL default CURRENT_TIMESTAMP,
block_number INTEGER NOT NULL,
tx_index INTEGER NOT NULL,
tx_hash VARCHAR(66) NOT NULL,
sender VARCHAR(42) NOT NULL,
recipient VARCHAR(42) NOT NULL,
source_token VARCHAR(42) NOT NULL,
destination_token VARCHAR(42) NOT NULL,
from_value BIGINT NOT NULL,
to_value BIGINT NOT NULL,
success BOOLEAN NOT NULL,
date_block TIMESTAMP NOT NULL
);
CREATE TABLE tx_sync (
id SERIAL PRIMARY KEY,
tx VARCHAR(66) NOT NULL
);
INSERT INTO tx_sync (tx) VALUES('0x0000000000000000000000000000000000000000000000000000000000000000');


@ -0,0 +1,23 @@
CREATE TABLE tx (
id SERIAL PRIMARY KEY,
date_registered DATETIME NOT NULL default CURRENT_DATE,
block_number INTEGER NOT NULL,
tx_index INTEGER NOT NULL,
tx_hash VARCHAR(66) NOT NULL,
sender VARCHAR(42) NOT NULL,
recipient VARCHAR(42) NOT NULL,
source_token VARCHAR(42) NOT NULL,
destination_token VARCHAR(42) NOT NULL,
from_value INTEGER NOT NULL,
to_value INTEGER NOT NULL,
success BOOLEAN NOT NULL,
date_block DATETIME NOT NULL,
CHECK (success IN (0, 1))
);
CREATE TABLE tx_sync (
    id SERIAL PRIMARY KEY,
tx VARCHAR(66) NOT NULL
);
INSERT INTO tx_sync (tx) VALUES('0x0000000000000000000000000000000000000000000000000000000000000000');


@ -0,0 +1,102 @@
openapi: "3.0.3"
info:
title: Grassroots Economics CIC Cache
description: Cache of processed transaction data from Ethereum blockchain and worker queues
termsOfService: bzz://grassrootseconomics.eth/terms
contact:
name: Grassroots Economics
url: https://www.grassrootseconomics.org
email: will@grassecon.org
license:
name: GPLv3
version: 0.1.0
paths:
/tx/{offset}/{limit}:
description: Bloom filter for batch of latest transactions
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
    parameters:
      - name: offset
        in: path
        required: true
        schema:
          type: integer
          format: int32
      - name: limit
        in: path
        required: true
        schema:
          type: integer
          format: int32
/tx/{address}/{offset}/{limit}:
description: Bloom filter for batch of latest transactions by account
get:
tags:
- transactions
description:
Retrieve transactions
      operationId: txAccount.get
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
    parameters:
      - name: address
        in: path
        required: true
        schema:
          type: string
      - name: offset
        in: path
        required: true
        schema:
          type: integer
          format: int32
      - name: limit
        in: path
        required: true
        schema:
          type: integer
          format: int32
components:
schemas:
BlocksBloom:
type: object
properties:
        low:
          type: integer
          format: int32
          description: The lowest block number included in the filter
        high:
          type: integer
          format: int32
          description: The highest block number included in the filter
block_filter:
type: string
format: byte
description: Block number filter
blocktx_filter:
type: string
format: byte
description: Block and tx index filter
alg:
type: string
description: Hashing algorithm (currently only using sha256)
        filter_rounds:
          type: integer
          format: int32
          description: Number of hash rounds used to create the filter


@ -0,0 +1,54 @@
FROM python:3.8.6-slim-buster
#COPY --from=0 /usr/local/share/cic/solidity/ /usr/local/share/cic/solidity/
WORKDIR /usr/src/cic-cache
ARG pip_extra_index_url_flag='--index https://pypi.org/simple --extra-index-url https://pip.grassrootseconomics.net:8433'
ARG root_requirement_file='requirements.txt'
#RUN apk update && \
# apk add gcc musl-dev gnupg libpq
#RUN apk add postgresql-dev
#RUN apk add linux-headers
#RUN apk add libffi-dev
RUN apt-get update && \
    apt-get install -y gcc gnupg libpq-dev wget make g++ bash procps git
# Copy shared requirements from top of mono-repo
RUN echo "copying root req file ${root_requirement_file}"
COPY $root_requirement_file .
RUN pip install -r $root_requirement_file $pip_extra_index_url_flag
COPY cic-cache/requirements.txt ./
COPY cic-cache/setup.cfg \
cic-cache/setup.py \
./
COPY cic-cache/cic_cache/ ./cic_cache/
COPY cic-cache/scripts/ ./scripts/
COPY cic-cache/test_requirements.txt ./
RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
RUN pip install $pip_extra_index_url_flag .
RUN pip install .[server]
COPY cic-cache/tests/ ./tests/
#COPY db/ cic-cache/db
#RUN apk add postgresql-client
# ini files in the config directory define the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY cic-cache/config/ /usr/local/etc/cic-cache/
# for db migrations
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
COPY cic-cache/cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
mkdir -p /usr/local/share/cic/solidity && \
cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
# Tracker
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
# Server
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]

File diff suppressed because it is too large


@ -0,0 +1,40 @@
let xmlhttprequest = require('xhr2');
let moolb = require('moolb');
let xhr = new xmlhttprequest();
xhr.responseType = 'json';
xhr.open('GET', 'http://localhost:5555/tx/0/100');
xhr.addEventListener('load', (e) => {
    let d = xhr.response;
    let b_one = Buffer.from(d.block_filter, 'base64');
    let b_two = Buffer.from(d.blocktx_filter, 'base64');
for (let i = 0; i < 8192; i++) {
if (b_two[i] > 0) {
console.debug('value on', i, b_two[i]);
}
}
console.log(b_one, b_two);
let f_block = moolb.fromBytes(b_one, d.filter_rounds);
let f_blocktx = moolb.fromBytes(b_two, d.filter_rounds);
let a = new ArrayBuffer(8);
let w = new DataView(a);
for (let i = 410000; i < 430000; i++) {
w.setInt32(0, i);
let r = new Uint8Array(a.slice(0, 4));
if (f_block.check(r)) {
for (let j = 0; j < 200; j++) {
w = new DataView(a);
w.setInt32(4, j);
r = new Uint8Array(a);
if (f_blocktx.check(r)) {
console.log('true', i, j);
}
}
}
}
});
let r = xhr.send();


@ -0,0 +1,10 @@
alembic==1.4.2
confini~=0.3.6b2
uwsgi==2.0.19.1
moolb~=0.1.0
cic-registry~=0.5.3a4
SQLAlchemy==1.3.20
semver==2.13.0
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3


@ -0,0 +1,56 @@
#!/usr/bin/python
import os
import argparse
import logging
import alembic
from alembic.config import Config as AlembicConfig
import confini
from cic_cache.db import dsn_from_config
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
# BUG: the dbdir doesn't work after script install
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')
config_dir = os.path.join('/usr/local/etc/cic-cache')
argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()
if args.vv:
logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
logging.getLogger().setLevel(logging.INFO)
config = confini.Config(args.c, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))
migrations_dir = os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrations_dir):
logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
migrations_dir = os.path.join(args.migrations_dir, 'default')
# connect to database
dsn = dsn_from_config(config)
logg.info('using migrations dir {}'.format(migrations_dir))
logg.info('using db {}'.format(dsn))
ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', dsn)
ac.set_main_option('script_location', migrations_dir)
alembic.command.upgrade(ac, 'head')

apps/cic-cache/setup.cfg Normal file

@ -0,0 +1,37 @@
[metadata]
name = cic-cache
description = CIC Cache API and server
author = Louis Holbrook
author_email = dev@holbrook.no
url = https://gitlab.com/grassrootseconomics/cic-eth
keywords =
cic
cryptocurrency
ethereum
classifiers =
Programming Language :: Python :: 3
Operating System :: OS Independent
Development Status :: 3 - Alpha
Environment :: No Input/Output (Daemon)
Intended Audience :: Developers
License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
Topic :: Internet
# Topic :: Blockchain :: EVM
license = GPL3
license_files =
LICENSE.txt
[options]
python_requires = >= 3.6
packages =
cic_cache
cic_cache.db
cic_cache.db.models
cic_cache.runnable
scripts =
./scripts/migrate.py
[options.entry_points]
console_scripts =
cic-cache-tracker = cic_cache.runnable.tracker:main
cic-cache-server = cic_cache.runnable.server:main

apps/cic-cache/setup.py Normal file

@ -0,0 +1,60 @@
from setuptools import setup
import configparser
import os
import time
from cic_cache.version import (
version_object,
version_string
)
class PleaseCommitFirstError(Exception):
pass
def git_hash():
import subprocess
git_diff = subprocess.run(['git', 'diff'], capture_output=True)
if len(git_diff.stdout) > 0:
raise PleaseCommitFirstError()
git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
git_hash_brief = git_hash.stdout.decode('utf-8')[:8]
return git_hash_brief
version_string = str(version_object)
try:
version_git = git_hash()
version_string += '+build.{}'.format(version_git)
except FileNotFoundError:
time_string_pair = str(time.time()).split('.')
version_string += '+build.{}{:<09d}'.format(
time_string_pair[0],
int(time_string_pair[1]),
)
print('final version string will be {}'.format(version_string))
requirements = []
f = open('requirements.txt', 'r')
while True:
l = f.readline()
if l == '':
break
requirements.append(l.rstrip())
f.close()
test_requirements = []
f = open('test_requirements.txt', 'r')
while True:
l = f.readline()
if l == '':
break
test_requirements.append(l.rstrip())
f.close()
setup(
version=version_string,
install_requires=requirements,
tests_require=test_requirements,
)


@ -0,0 +1,6 @@
pytest==6.0.1
pytest-cov==2.10.1
pytest-mock==3.3.1
pysqlite3==0.4.3
sqlparse==0.4.1
pytest-celery==0.0.0a1


@ -0,0 +1,86 @@
# standard imports
import os
import sys
import datetime
# third-party imports
import pytest
# local imports
from cic_cache import db
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
sys.path.insert(0, root_dir)
# fixtures
from tests.fixtures_config import *
from tests.fixtures_database import *
from tests.fixtures_celery import *
@pytest.fixture(scope='session')
def balances_dict_fields():
return {
'out_pending': 0,
'out_synced': 1,
'out_confirmed': 2,
'in_pending': 3,
'in_synced': 4,
'in_confirmed': 5,
}
@pytest.fixture(scope='function')
def txs(
init_database,
list_defaults,
list_actors,
list_tokens,
):
session = init_database
tx_number = 13
tx_hash_first = '0x' + os.urandom(32).hex()
val = 15000
nonce = 1
dt = datetime.datetime.utcnow()
db.add_transaction(
session,
tx_hash_first,
list_defaults['block'],
tx_number,
list_actors['alice'],
list_actors['bob'],
list_tokens['foo'],
list_tokens['foo'],
1024,
2048,
True,
dt.timestamp(),
)
tx_number = 42
tx_hash_second = '0x' + os.urandom(32).hex()
tx_signed_second = '0x' + os.urandom(128).hex()
nonce = 1
dt -= datetime.timedelta(hours=1)
db.add_transaction(
session,
tx_hash_second,
list_defaults['block']-1,
tx_number,
list_actors['diane'],
list_actors['alice'],
list_tokens['foo'],
list_tokens['foo'],
1024,
2048,
False,
dt.timestamp(),
)
session.commit()


@ -0,0 +1,48 @@
# third-party imports
import pytest
import tempfile
import logging
import shutil
logg = logging.getLogger(__name__)
# celery fixtures
@pytest.fixture(scope='session')
def celery_includes():
return [
'cic_cache.tasks.tx',
]
@pytest.fixture(scope='session')
def celery_config():
bq = tempfile.mkdtemp()
bp = tempfile.mkdtemp()
rq = tempfile.mkdtemp()
logg.debug('celery broker queue {} processed {}'.format(bq, bp))
logg.debug('celery backend store {}'.format(rq))
yield {
'broker_url': 'filesystem://',
'broker_transport_options': {
'data_folder_in': bq,
'data_folder_out': bq,
'data_folder_processed': bp,
},
'result_backend': 'file://{}'.format(rq),
}
logg.debug('cleaning up celery filesystem backend files {} {} {}'.format(bq, bp, rq))
shutil.rmtree(bq)
shutil.rmtree(bp)
shutil.rmtree(rq)
@pytest.fixture(scope='session')
def celery_worker_parameters():
return {
# 'queues': ('cic-cache'),
}
@pytest.fixture(scope='session')
def celery_enable_logging():
return True


@ -0,0 +1,20 @@
# standard imports
import os
import logging
# third-party imports
import pytest
import confini
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
logg = logging.getLogger(__file__)
@pytest.fixture(scope='session')
def load_config():
config_dir = os.path.join(root_dir, '.config/test')
conf = confini.Config(config_dir, 'CICTEST')
conf.process()
logg.debug('config {}'.format(conf))
return conf


@ -0,0 +1,118 @@
# standard imports
import os
import logging
import re
# third-party imports
import pytest
import sqlparse
# local imports
from cic_cache.db.models.base import SessionBase
from cic_cache.db import dsn_from_config
logg = logging.getLogger(__file__)
@pytest.fixture(scope='function')
def database_engine(
load_config,
):
if load_config.get('DATABASE_ENGINE') == 'sqlite':
SessionBase.transactional = False
SessionBase.poolable = False
try:
os.unlink(load_config.get('DATABASE_NAME'))
except FileNotFoundError:
pass
dsn = dsn_from_config(load_config)
SessionBase.connect(dsn)
return dsn
# TODO: use alembic instead to migrate the db; for now we have to maintain a schema here separate from the migration scripts in scripts/migrate.py
@pytest.fixture(scope='function')
def init_database(
load_config,
database_engine,
):
rootdir = os.path.dirname(os.path.dirname(__file__))
schemadir = os.path.join(rootdir, 'db', load_config.get('DATABASE_DRIVER'))
if load_config.get('DATABASE_ENGINE') == 'sqlite':
rconn = SessionBase.engine.raw_connection()
f = open(os.path.join(schemadir, 'db.sql'))
s = f.read()
f.close()
rconn.executescript(s)
else:
rconn = SessionBase.engine.raw_connection()
rcursor = rconn.cursor()
#rcursor.execute('DROP FUNCTION IF EXISTS public.transaction_list')
#rcursor.execute('DROP FUNCTION IF EXISTS public.balances')
f = open(os.path.join(schemadir, 'db.sql'))
s = f.read()
f.close()
r = re.compile(r'^[A-Z]', re.MULTILINE)
for l in sqlparse.parse(s):
strl = str(l)
# we need to check for empty query lines, as sqlparse doesn't do that on its own (and psycopg complains when it gets them)
if not re.search(r, strl):
logg.warning('skipping parsed query line {}'.format(strl))
continue
rcursor.execute(strl)
rconn.commit()
rcursor.execute('SET search_path TO public')
# this doesn't work when run separately, no idea why
# functions have been manually added to original schema from cic-eth
# f = open(os.path.join(schemadir, 'proc_transaction_list.sql'))
# s = f.read()
# f.close()
# rcursor.execute(s)
#
# f = open(os.path.join(schemadir, 'proc_balances.sql'))
# s = f.read()
# f.close()
# rcursor.execute(s)
rcursor.close()
session = SessionBase.create_session()
yield session
session.commit()
session.close()
@pytest.fixture(scope='function')
def list_tokens(
):
return {
'foo': '0x' + os.urandom(20).hex(),
'bar': '0x' + os.urandom(20).hex(),
}
@pytest.fixture(scope='function')
def list_actors(
):
return {
'alice': '0x' + os.urandom(20).hex(),
'bob': '0x' + os.urandom(20).hex(),
'charlie': '0x' + os.urandom(20).hex(),
'diane': '0x' + os.urandom(20).hex(),
}
@pytest.fixture(scope='function')
def list_defaults(
):
return {
'block': 420000,
}


@ -0,0 +1,35 @@
# standard imports
import os
import datetime
import logging
import json
# third-party imports
import pytest
# local imports
from cic_cache import BloomCache
logg = logging.getLogger()
def test_cache(
init_database,
list_defaults,
list_actors,
list_tokens,
txs,
):
session = init_database
c = BloomCache(session)
b = c.load_transactions(0, 100)
assert b[0] == list_defaults['block'] - 1
c = BloomCache(session)
    b = c.load_transactions_account(list_actors['alice'], 0, 100)
    assert b[0] == list_defaults['block'] - 1


@ -0,0 +1,27 @@
# standard imports
import logging
# third-party imports
import celery
# local imports
from cic_cache.api import Api
logg = logging.getLogger()
def test_task(
init_database,
list_defaults,
list_actors,
list_tokens,
txs,
celery_session_worker,
):
api = Api(queue=None)
t = api.list(0, 100)
r = t.get()
logg.debug('r {}'.format(r))
assert r['low'] == list_defaults['block'] - 1


@ -131,8 +131,8 @@ services:
cic-cache-tracker:
  build:
    context: apps
    dockerfile: cic-cache/docker/Dockerfile
  environment:
    CIC_REGISTRY_ADDRESS: $CIC_REGISTRY_ADDRESS # supplied at contract-config after contract provisioning
    ETH_PROVIDER: ${ETH_PROVIDER:-http://eth:8545}
@ -167,8 +167,8 @@ services:
cic-cache-server:
  build:
    context: apps
    dockerfile: cic-cache/docker/Dockerfile
  environment:
    DATABASE_USER: $DATABASE_USER
    DATABASE_HOST: $DATABASE_HOST