cic-cache migrated
This commit is contained in:
parent
bc0450f39d
commit
85f26a4be4
@@ -5,7 +5,6 @@ include:
  - local: 'apps/cic-ussd/.gitlab-ci.yml'
  - local: 'apps/cic-notify/.gitlab-ci.yml'
  - local: 'apps/cic-meta/.gitlab-ci.yml'
  - local: 'apps/cic-cache/.gitlab-ci.yml'

stages:
  - build
0 .gitmodules vendored
1 apps/cic-cache Submodule
@@ -0,0 +1 @@
Subproject commit 06c5f0fb0dca5992a7ffb12874b5cb0c9fdcd706
@@ -1,2 +0,0 @@
[bancor]
dir =
@@ -1,2 +0,0 @@
[cic]
registry_address =
@@ -1,8 +0,0 @@
[database]
NAME=cic-eth
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
@@ -1,6 +0,0 @@
[eth]
provider = ws://localhost:8545
#http_provider = http://localhost:8545
#provider = http://localhost:8545
gas_provider_address =
#chain_id =
@@ -1,2 +0,0 @@
[bancor]
dir =
@@ -1,2 +0,0 @@
[cic]
registry_address =
@@ -1,8 +0,0 @@
[database]
NAME=cic-cache-test
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=sqlite
DRIVER=pysqlite
@@ -1,5 +0,0 @@
[eth]
#ws_provider = ws://localhost:8546
#http_provider = http://localhost:8545
provider = http://localhost:8545
#chain_id =
@@ -1,5 +0,0 @@
[report]
omit =
	.venv/*
	scripts/*
	cic_cache/db/postgres/*
@@ -1,7 +0,0 @@
set -a
CICTEST_DATABASE_ENGINE=postgresql
CICTEST_DATABASE_DRIVER=psycopg2
CICTEST_DATABASE_HOST=localhost
CICTEST_DATABASE_PORT=5432
CICTEST_DATABASE_NAME=cic-eth-test
set +a
8 apps/cic-cache/.gitignore vendored
@@ -1,8 +0,0 @@
.envrc
.envrc_dev
.venv
__pycache__
*.pyc
_build
doc/**/*.png
doc/**/html
@@ -1,22 +0,0 @@
.cic_cache_variables:
  variables:
    APP_NAME: cic-cache
    DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile

.cic_cache_changes_target:
  rules:
    - changes:
        - $CONTEXT/$APP_NAME/*

build-mr-cic-cache:
  extends:
    - .cic_cache_changes_target
    - .py_build_merge_request
    - .cic_cache_variables

build-push-cic-cache:
  extends:
    - .py_build_push
    - .cic_cache_variables
@@ -1,11 +0,0 @@
- 0.1.1
	* Add missing modules to setup
- 0.1.0
	* Remove old APIs
	* Add bloom filter output APIs for all txs and per-account txs
- 0.0.2
	* UWSGI server endpoint example
	* OpenAPI spec
	* Stored procedures, test fixture for database schema
- 0.0.1
	* Add JSON translators of transaction_list and balances stored procedure queries
@@ -1 +0,0 @@
from .cache import BloomCache
@@ -1,89 +0,0 @@
# standard imports
import logging

# third-party imports
import moolb

# local imports
from cic_cache.db import list_transactions_mined
from cic_cache.db import list_transactions_account_mined

logg = logging.getLogger()


class BloomCache:

    def __init__(self, session):
        self.session = session


    @staticmethod
    def __get_filter_size(n):
        n = 8192 * 8
        logg.warning('filter size hardcoded to {}'.format(n))
        return n


    def load_transactions(self, offset, limit):
        """Retrieves a list of transactions from cache and creates a bloom filter pointing to blocks and transactions.

        Block and transaction numbers are serialized as 32-bit big-endian numbers. The input to the second bloom filter is the concatenation of the serialized block number and transaction index.

        For example, if the block number is 13 and the transaction index is 42, the inputs are:

        block filter:    0x0000000d
        block+tx filter: 0x0000000d0000002a

        :param offset: Offset in data set to return transactions from
        :type offset: int
        :param limit: Max number of transactions to retrieve
        :type limit: int
        :return: Lowest block, highest block, bloom filter for blocks, bloom filter for blocks|tx
        :rtype: tuple
        """
        rows = list_transactions_mined(self.session, offset, limit)

        f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        highest_block = -1
        lowest_block = -1
        for r in rows:
            if highest_block == -1:
                highest_block = r[0]
            lowest_block = r[0]
            block = r[0].to_bytes(4, byteorder='big')
            tx = r[1].to_bytes(4, byteorder='big')
            f_block.add(block)
            f_blocktx.add(block + tx)
            logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
        return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)


    def load_transactions_account(self, address, offset, limit):
        """Same as load_transactions(...), but only retrieves transactions where the specified account address is sender or recipient.

        :param address: Address to retrieve transactions for.
        :type address: str, 0x-hex
        :param offset: Offset in data set to return transactions from
        :type offset: int
        :param limit: Max number of transactions to retrieve
        :type limit: int
        :return: Lowest block, highest block, bloom filter for blocks, bloom filter for blocks|tx
        :rtype: tuple
        """
        rows = list_transactions_account_mined(self.session, address, offset, limit)

        f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        highest_block = -1
        lowest_block = -1
        for r in rows:
            if highest_block == -1:
                highest_block = r[0]
            lowest_block = r[0]
            block = r[0].to_bytes(4, byteorder='big')
            tx = r[1].to_bytes(4, byteorder='big')
            f_block.add(block)
            f_blocktx.add(block + tx)
            logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
        return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
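As a sanity check on the serialization described in the docstring, the two filter keys for the example values (block 13, tx index 42) can be reproduced in a few lines. A minimal sketch; `struct.pack('>I', ...)` is equivalent to the `int.to_bytes(4, byteorder='big')` calls used above:

```python
import struct

# example values from the docstring above
block_number = 13
tx_index = 42

block_key = struct.pack('>I', block_number)            # 4 bytes, big-endian
blocktx_key = block_key + struct.pack('>I', tx_index)  # 8 bytes: block || tx

assert block_key.hex() == '0000000d'
assert blocktx_key.hex() == '0000000d0000002a'
```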
@@ -1,35 +0,0 @@
# standard imports
import logging

# local imports
from .list import list_transactions_mined
from .list import list_transactions_account_mined
from .list import add_transaction

logg = logging.getLogger()


def dsn_from_config(config):
    scheme = config.get('DATABASE_ENGINE')
    if config.get('DATABASE_DRIVER') is not None:
        scheme += '+{}'.format(config.get('DATABASE_DRIVER'))

    dsn = ''
    if config.get('DATABASE_ENGINE') == 'sqlite':
        dsn = '{}:///{}'.format(
                scheme,
                config.get('DATABASE_NAME'),
            )
    else:
        dsn = '{}://{}:{}@{}:{}/{}'.format(
                scheme,
                config.get('DATABASE_USER'),
                config.get('DATABASE_PASSWORD'),
                config.get('DATABASE_HOST'),
                config.get('DATABASE_PORT'),
                config.get('DATABASE_NAME'),
            )
    logg.debug('parsed dsn from config: {}'.format(dsn))
    return dsn
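For reference, a sketch of the DSNs this function yields for the two config flavors in .config/ above; StubConfig is a stand-in defined only for illustration (confini.Config exposes the same get() interface):

```python
from cic_cache.db import dsn_from_config


class StubConfig:
    # illustrative stand-in for confini.Config; only get() is needed here
    def __init__(self, d):
        self.d = d

    def get(self, k):
        return self.d.get(k)


sqlite_conf = StubConfig({
    'DATABASE_ENGINE': 'sqlite',
    'DATABASE_DRIVER': 'pysqlite',
    'DATABASE_NAME': 'cic-cache-test',
    })
assert dsn_from_config(sqlite_conf) == 'sqlite+pysqlite:///cic-cache-test'

postgres_conf = StubConfig({
    'DATABASE_ENGINE': 'postgresql',
    'DATABASE_DRIVER': 'psycopg2',
    'DATABASE_USER': 'postgres',
    'DATABASE_PASSWORD': '',
    'DATABASE_HOST': 'localhost',
    'DATABASE_PORT': '5432',
    'DATABASE_NAME': 'cic-eth',
    })
assert dsn_from_config(postgres_conf) == 'postgresql+psycopg2://postgres:@localhost:5432/cic-eth'
```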
@@ -1,56 +0,0 @@
# standard imports
import logging
import datetime

# third-party imports
from cic_cache.db.models.base import SessionBase

logg = logging.getLogger()


def list_transactions_mined(session, offset, limit):
    """Executes db query to return all confirmed transactions according to the specified offset and limit.

    :param offset: Offset in data set to return transactions from
    :type offset: int
    :param limit: Max number of transactions to retrieve
    :type limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """
    s = "SELECT block_number, tx_index FROM tx ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(limit, offset)
    r = session.execute(s)
    return r


def list_transactions_account_mined(session, address, offset, limit):
    """Same as list_transactions_mined(...), but only retrieves transactions where the specified account address is sender or recipient.

    :param address: Address to retrieve transactions for.
    :type address: str, 0x-hex
    :param offset: Offset in data set to return transactions from
    :type offset: int
    :param limit: Max number of transactions to retrieve
    :type limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """
    s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(address, address, limit, offset)
    r = session.execute(s)
    return r


def add_transaction(session, tx_hash, block_number, tx_index, sender, receiver, source_token, destination_token, success, timestamp):
    date_block = datetime.datetime.fromtimestamp(timestamp)
    s = "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, source_token, destination_token, success, date_block) VALUES ('{}', {}, {}, '{}', '{}', '{}', '{}', {}, '{}')".format(
            tx_hash,
            block_number,
            tx_index,
            sender,
            receiver,
            source_token,
            destination_token,
            success,
            date_block,
        )
    session.execute(s)
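The queries above interpolate request-supplied values straight into SQL strings. A safer pattern, sketched here with SQLAlchemy's text() construct (an alternative illustration, not the committed implementation), binds the values through the driver instead:

```python
from sqlalchemy import text


def list_transactions_account_mined_bound(session, address, offset, limit):
    # same query as above, but with values bound by the driver instead of
    # interpolated into the SQL string
    s = text(
        "SELECT block_number, tx_index FROM tx "
        "WHERE sender = :a OR recipient = :a "
        "ORDER BY block_number DESC, tx_index DESC "
        "LIMIT :limit OFFSET :offset"
        )
    return session.execute(s, {'a': address, 'limit': limit, 'offset': offset})
```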
@@ -1 +0,0 @@
Generic single-database configuration.
@@ -1,86 +0,0 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = .

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to ./versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat ./versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

#sqlalchemy.url = driver://user:pass@localhost/dbname
sqlalchemy.url = postgresql+psycopg2://postgres@localhost:5432/cic-cache


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -1,77 +0,0 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -1,24 +0,0 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -1,44 +0,0 @@
"""Base tables

Revision ID: 63b629f14a85
Revises:
Create Date: 2020-12-04 08:16:00.412189

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '63b629f14a85'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        'tx',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('date_registered', sa.DateTime, nullable=False),
        sa.Column('block_number', sa.Integer, nullable=False),
        sa.Column('tx_index', sa.Integer, nullable=False),
        sa.Column('tx_hash', sa.String(66), nullable=False),
        sa.Column('sender', sa.String(42), nullable=False),
        sa.Column('recipient', sa.String(42), nullable=False),
        sa.Column('source_token', sa.String(42), nullable=False),
        sa.Column('destination_token', sa.String(42), nullable=False),
        sa.Column('success', sa.Boolean, nullable=False),
        sa.Column('date_block', sa.DateTime, nullable=False),
    )
    op.create_table(
        'tx_sync',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('tx', sa.String(66), nullable=False),
    )

    op.execute("INSERT INTO tx_sync (tx) VALUES('0x0000000000000000000000000000000000000000000000000000000000000000');")


def downgrade():
    op.drop_table('tx_sync')
    op.drop_table('tx')
@@ -1,48 +0,0 @@
# third-party imports
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

Model = declarative_base(name='Model')


class SessionBase(Model):
    __abstract__ = True

    id = Column(Integer, primary_key=True)

    engine = None
    query = None
    sessionmaker = None


    @staticmethod
    def create_session():
        #SessionBase.session = session()
        #return SessionBase.session
        return SessionBase.sessionmaker()


    @staticmethod
    def _set_engine(engine):
        SessionBase.engine = engine
        SessionBase.sessionmaker = sessionmaker(bind=SessionBase.engine)


    @staticmethod
    def build():
        Model.metadata.create_all(bind=SessionBase.engine)


    @staticmethod
    def connect(dsn):
        e = create_engine(dsn)
        SessionBase._set_engine(e)


    @staticmethod
    def disconnect():
        SessionBase.engine.dispose()
        SessionBase.session = None
        SessionBase.engine = None
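A minimal usage sketch for SessionBase; the DSN literal is illustrative (dsn_from_config above produces the same shape from config):

```python
from cic_cache.db.models.base import SessionBase

SessionBase.connect('sqlite+pysqlite:///cic-cache-test')  # illustrative DSN
session = SessionBase.create_session()
try:
    session.execute('SELECT 1')
finally:
    session.close()
SessionBase.disconnect()
```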
@@ -1,141 +0,0 @@
# standard imports
import os
import re
import logging
import argparse
import json
import base64

# third-party imports
import confini

# local imports
from cic_cache import BloomCache
from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_eth', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')

config_dir = os.path.join('/usr/local/etc/cic-cache')

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(args.c, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))

dsn = dsn_from_config(config)
SessionBase.connect(dsn)

re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'

DEFAULT_LIMIT = 100


def process_transactions_account_bloom(session, env):
    r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
    if not r:
        return None

    address = r[1]
    if r[2] is None:
        address = '0x' + address
    offset = 0
    if r.lastindex > 2:
        offset = r[3]
    limit = DEFAULT_LIMIT
    if r.lastindex > 3:
        limit = r[4]

    c = BloomCache(session)
    (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)

    o = {
        'alg': 'sha256',
        'low': lowest_block,
        'high': highest_block,
        'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
        'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
        'filter_rounds': 3,
        }

    j = json.dumps(o)

    return ('application/json', j.encode('utf-8'),)


def process_transactions_all_bloom(session, env):
    r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
    if not r:
        return None

    offset = 0
    if r.lastindex > 0:
        offset = r[1]
    limit = DEFAULT_LIMIT
    if r.lastindex > 1:
        limit = r[2]

    c = BloomCache(session)
    (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)

    o = {
        'alg': 'sha256',
        'low': lowest_block,
        'high': highest_block,
        'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
        'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
        'filter_rounds': 3,
        }

    j = json.dumps(o)

    return ('application/json', j.encode('utf-8'),)


# uwsgi application
def application(env, start_response):

    headers = []
    content = b''

    session = SessionBase.create_session()
    for handler in [
            process_transactions_all_bloom,
            process_transactions_account_bloom,
            ]:
        r = handler(session, env)
        if r is not None:
            (mime_type, content) = r
            break
    session.close()

    headers.append(('Content-Length', str(len(content))),)
    headers.append(('Access-Control-Allow-Origin', '*',))

    if len(content) == 0:
        headers.append(('Content-Type', 'text/plain; charset=UTF-8',))
        start_response('404 Looked everywhere, sorry', headers)
    else:
        headers.append(('Content-Type', mime_type,))
        start_response('200 OK', headers)

    return [content]
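The JSON envelope built by the handlers above can be consumed like this; a sketch assuming the server is reachable on localhost:5555 (the host and port used by the bloom_client example further down):

```python
import base64
import json
import urllib.request

# assumption: cic-cache server running locally on port 5555
with urllib.request.urlopen('http://localhost:5555/tx/0/100') as resp:
    o = json.loads(resp.read())

block_filter = base64.b64decode(o['block_filter'])
blocktx_filter = base64.b64decode(o['blocktx_filter'])
# BloomCache.__get_filter_size hardcodes 8192 * 8 bits, i.e. 8192 bytes
assert len(block_filter) == 8192
print(o['low'], o['high'], o['alg'], o['filter_rounds'])
```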
@@ -1,284 +0,0 @@
# standard imports
import sys
import os
import argparse
import logging
import time
import enum
import re

# third-party imports
import confini
from cic_registry import CICRegistry
from cic_registry.bancor import BancorRegistry
from cic_registry.token import Token
from cic_registry.error import UnknownContractError
from web3.exceptions import BlockNotFound, TransactionNotFound
from websockets.exceptions import ConnectionClosedError
from requests.exceptions import ConnectionError
import web3
from web3 import HTTPProvider, WebsocketProvider

# local imports
from cic_cache import db
from cic_cache.db.models.base import SessionBase

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)

log_topics = {
    'transfer': '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
    'convert': '0x7154b38b5dd31bb3122436a96d4e09aba5b323ae1fd580025fab55074334c095',
    }

config_dir = os.path.join('/usr/local/etc/cic-cache')

argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
args = argparser.parse_args(sys.argv[1:])

config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)

if args.vv == True:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v == True:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(config_dir, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))

# connect to database
dsn = db.dsn_from_config(config)
SessionBase.connect(dsn)


re_websocket = re.compile('^wss?://')
re_http = re.compile('^https?://')
blockchain_provider = config.get('ETH_PROVIDER')
if re.match(re_websocket, blockchain_provider) is not None:
    blockchain_provider = WebsocketProvider(blockchain_provider)
elif re.match(re_http, blockchain_provider) is not None:
    blockchain_provider = HTTPProvider(blockchain_provider)
else:
    raise ValueError('unknown provider url {}'.format(blockchain_provider))


def web3_constructor():
    w3 = web3.Web3(blockchain_provider)
    return (blockchain_provider, w3)


class RunStateEnum(enum.IntEnum):
    INIT = 0
    RUN = 1
    TERMINATE = 9


class Tracker:

    def __init__(self):
        self.block_height = 0
        self.tx_height = 0
        self.state = RunStateEnum.INIT


    def refresh_registry(self, w3):
        cr = CICRegistry.get_contract(CICRegistry.bancor_chain_spec, 'ConverterRegistry')
        f = cr.function('getConvertibleTokens')
        anchors = f().call()
        # TODO: if there are other token sources, this number may not match anymore. The cache count method should be moved to bancorregistry object instead
        r = CICRegistry.get_chain_registry(CICRegistry.bancor_chain_spec)
        #logg.debug('anchors {} {}'.format(anchors, ContractRegistry.cache_token_count()))
        if len(anchors) != r.cache_token_count():
            logg.debug('token count mismatch, scanning')

            for a in anchors:
                if ContractRegistry.get_address(a) is None:
                    abi = CICRegistry.abi('ERC20Token')
                    #abi = ContractRegistry.contracts['ERC20Token'].contract.abi
                    c = w3.eth.contract(address=a, abi=abi)
                    t = ContractRegistry.add_token(a, c)
                    logg.info('new token {} at {}'.format(t.symbol(), t.address))


    def __process_tx(self, w3, session, t, r, l, b):
        token_value = int(l.data, 16)
        token_sender = l.topics[1][-20:].hex()
        token_recipient = l.topics[2][-20:].hex()

        ts = ContractRegistry.get_address(t.address)
        logg.info('add token transfer {} value {} from {} to {}'.format(
            ts.symbol(),
            token_value,
            token_sender,
            token_recipient,
            )
        )

        logg.debug('block {}'.format(b))
        db.add_transaction(
            session,
            r.transactionHash.hex(),
            r.blockNumber,
            r.transactionIndex,
            w3.toChecksumAddress(token_sender),
            w3.toChecksumAddress(token_recipient),
            t.address,
            t.address,
            r.status == 1,
            b.timestamp,
            )
        session.flush()


    # TODO: simplify / split up and/or comment, function is too long
    def __process_convert(self, w3, session, t, r, l, b):
        token_source = l.topics[2][-20:].hex()
        token_source = w3.toChecksumAddress(token_source)
        token_destination = l.topics[3][-20:].hex()
        token_destination = w3.toChecksumAddress(token_destination)
        data_noox = l.data[2:]
        d = data_noox[:64]
        token_from_value = int(d, 16)
        d = data_noox[64:128]
        token_to_value = int(d, 16)
        token_trader = '0x' + data_noox[192-40:]

        ts = ContractRegistry.get_address(token_source)
        if ts is None:
            ts = ContractRegistry.reserves[token_source]
        td = ContractRegistry.get_address(token_destination)
        if td is None:
            td = ContractRegistry.reserves[token_source]
        logg.info('add token convert {} -> {} value {} -> {} trader {}'.format(
            ts.symbol(),
            td.symbol(),
            token_from_value,
            token_to_value,
            token_trader,
            )
        )

        db.add_transaction(
            session,
            r.transactionHash.hex(),
            r.blockNumber,
            r.transactionIndex,
            w3.toChecksumAddress(token_trader),
            w3.toChecksumAddress(token_trader),
            token_source,
            token_destination,
            r.status == 1,
            b.timestamp,
            )
        session.flush()


    def process(self, w3, session, block):
        self.refresh_registry(w3)
        tx_count = w3.eth.getBlockTransactionCount(block.hash)
        b = w3.eth.getBlock(block.hash)
        for i in range(self.tx_height, tx_count):
            tx = w3.eth.getTransactionByBlock(block.hash, i)
            t = None
            try:
                t = CICRegistry.get_address(CICRegistry.bancor_chain_spec, tx.to)
            except UnknownContractError:
                logg.debug('block {} tx {} not our contract, skipping'.format(block, i))
                continue
            logg.debug('block tx {} {}'.format(block.number, i))
            if t is not None and isinstance(t, Token):
                r = w3.eth.getTransactionReceipt(tx.hash)
                for l in r.logs:
                    logg.info('{} token log {} {}'.format(tx.hash.hex(), l.logIndex, l.topics[0].hex()))
                    if l.topics[0].hex() == log_topics['transfer']:
                        self.__process_tx(w3, session, t, r, l, b)

            elif tx.to == CICRegistry.get_contract(CICRegistry.bancor_chain_spec, 'BancorNetwork').address:
                r = w3.eth.getTransactionReceipt(tx.hash)
                for l in r.logs:
                    logg.info('{} bancornetwork log {} {}'.format(tx.hash.hex(), l.logIndex, l.topics[0].hex()))
                    if l.topics[0].hex() == log_topics['convert']:
                        self.__process_convert(w3, session, t, r, l, b)

            session.execute("UPDATE tx_sync SET tx = '{}'".format(tx.hash.hex()))
            session.commit()
            self.tx_height += 1


    def __get_next_retry(self, backoff=False):
        return 1


    def loop(self, bancor_registry):
        logg.info('starting at block {} tx index {}'.format(self.block_height, self.tx_height))
        self.state = RunStateEnum.RUN
        while self.state == RunStateEnum.RUN:
            (provider, w3) = web3_constructor()
            session = SessionBase.create_session()
            try:
                block = w3.eth.getBlock(self.block_height)
                self.process(w3, session, block)
                self.block_height += 1
                self.tx_height = 0
            except BlockNotFound as e:
                logg.debug('no block {} yet, zZzZ...'.format(self.block_height))
                time.sleep(self.__get_next_retry())
            except ConnectionClosedError as e:
                logg.info('connection gone, retrying')
                time.sleep(self.__get_next_retry(True))
            except OSError as e:
                logg.error('cannot connect {}'.format(e))
                time.sleep(self.__get_next_retry(True))
            except Exception as e:
                session.close()
                raise e
            session.close()


    def load(self, w3):
        session = SessionBase.create_session()
        r = session.execute('SELECT tx FROM tx_sync').first()
        if r is not None:
            if r[0] == '0x{0:0{1}X}'.format(0, 64):
                logg.debug('last tx was zero-address, starting from scratch')
                return
            t = w3.eth.getTransaction(r[0])

            self.block_height = t.blockNumber
            self.tx_height = t.transactionIndex + 1
            c = w3.eth.getBlockTransactionCount(t.blockHash.hex())
            logg.debug('last tx processed {} index {} (max index {})'.format(t.blockNumber, t.transactionIndex, c - 1))
            if c == self.tx_height:
                self.block_height += 1
                self.tx_height = 0
        session.close()


def main():
    (provider, w3) = web3_constructor()
    CICRegistry.finalize(w3, config.get('CIC_REGISTRY_ADDRESS'))
    bancor_registry_contract = CICRegistry.get_contract(CICRegistry.bancor_chain_spec, 'BancorRegistry')
    bancor_chain_registry = CICRegistry.get_chain_registry(CICRegistry.bancor_chain_spec)
    bancor_registry = BancorRegistry(w3, bancor_chain_registry, bancor_registry_contract.address(), config.get('BANCOR_DIR'))
    bancor_registry.load()

    #bancor.load(w3, config.get('BANCOR_REGISTRY_ADDRESS'), config.get('BANCOR_DIR'))

    t = Tracker()
    t.load(w3)
    t.loop(bancor_registry)


if __name__ == '__main__':
    main()
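The resume logic in Tracker.load boils down to a small piece of arithmetic: continue at the transaction index after the last synced one, and roll over to the next block when that index runs off the end of the block. A pure-Python sketch of that bookkeeping (function and parameter names are illustrative):

```python
def next_sync_position(last_block, last_tx_index, block_tx_count):
    # illustrative restatement of Tracker.load's resume arithmetic
    block_height = last_block
    tx_height = last_tx_index + 1
    if tx_height == block_tx_count:
        # the last tx in the block was already processed; start at the next block
        block_height += 1
        tx_height = 0
    return (block_height, tx_height)


assert next_sync_position(42, 3, 10) == (42, 4)
assert next_sync_position(42, 9, 10) == (43, 0)
```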
@@ -1,5 +0,0 @@
CREATE DATABASE "cic-cache";
CREATE DATABASE "cic-eth";
CREATE DATABASE "cic-notify";
CREATE DATABASE "cic-meta";
CREATE DATABASE "cic-signer";
@@ -1,20 +0,0 @@
CREATE TABLE tx (
	id SERIAL PRIMARY KEY,
	date_registered TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
	block_number INTEGER NOT NULL,
	tx_index INTEGER NOT NULL,
	tx_hash VARCHAR(66) NOT NULL,
	sender VARCHAR(42) NOT NULL,
	recipient VARCHAR(42) NOT NULL,
	source_token VARCHAR(42) NOT NULL,
	destination_token VARCHAR(42) NOT NULL,
	success BOOLEAN NOT NULL,
	date_block TIMESTAMP NOT NULL
);

CREATE TABLE tx_sync (
	id SERIAL PRIMARY KEY,
	tx VARCHAR(66) NOT NULL
);

INSERT INTO tx_sync (tx) VALUES('0x0000000000000000000000000000000000000000000000000000000000000000');
@@ -1,21 +0,0 @@
CREATE TABLE tx (
	id SERIAL PRIMARY KEY,
	date_registered DATETIME NOT NULL DEFAULT CURRENT_DATE,
	block_number INTEGER NOT NULL,
	tx_index INTEGER NOT NULL,
	tx_hash VARCHAR(66) NOT NULL,
	sender VARCHAR(42) NOT NULL,
	recipient VARCHAR(42) NOT NULL,
	source_token VARCHAR(42) NOT NULL,
	destination_token VARCHAR(42) NOT NULL,
	success BOOLEAN NOT NULL,
	date_block DATETIME NOT NULL,
	CHECK (success IN (0, 1))
);

CREATE TABLE tx_sync (
	id SERIAL PRIMARY KEY,
	tx VARCHAR(66) NOT NULL
);

INSERT INTO tx_sync (tx) VALUES('0x0000000000000000000000000000000000000000000000000000000000000000');
@@ -1,102 +0,0 @@
openapi: "3.0.3"
info:
  title: Grassroots Economics CIC Cache
  description: Cache of processed transaction data from Ethereum blockchain and worker queues
  termsOfService: bzz://grassrootseconomics.eth/terms
  contact:
    name: Grassroots Economics
    url: https://www.grassrootseconomics.org
    email: will@grassecon.org
  license:
    name: GPLv3
  version: 0.1.0

paths:
  /tx/{offset}/{limit}:
    description: Bloom filter for batch of latest transactions
    get:
      tags:
        - transactions
      description:
        Retrieve transactions
      operationId: tx.get
      responses:
        200:
          description: Transaction query successful.
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/BlocksBloom"

    parameters:
      - name: offset
        in: path
        schema:
          type: integer
          format: int32
      - name: limit
        in: path
        schema:
          type: integer
          format: int32

  /tx/user/{address}/{offset}/{limit}:
    description: Bloom filter for batch of latest transactions by account
    get:
      tags:
        - transactions
      description:
        Retrieve transactions
      operationId: tx.get
      responses:
        200:
          description: Transaction query successful.
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/BlocksBloom"

    parameters:
      - name: address
        in: path
        required: true
        schema:
          type: string
      - name: offset
        in: path
        schema:
          type: integer
          format: int32
      - name: limit
        in: path
        schema:
          type: integer
          format: int32

components:
  schemas:
    BlocksBloom:
      type: object
      properties:
        low:
          type: integer
          format: int32
          description: The lowest block number included in the filter
        block_filter:
          type: string
          format: byte
          description: Block number filter
        blocktx_filter:
          type: string
          format: byte
          description: Block and tx index filter
        alg:
          type: string
          description: Hashing algorithm (currently only using sha256)
        filter_rounds:
          type: integer
          format: int32
          description: Number of hash rounds used to create the filter
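A request sketch against the per-account path defined above (host and port are assumptions borrowed from the bloom_client example; the response decodes the same way as the all-transactions variant):

```python
import base64
import json
import urllib.request

address = '0x' + '00' * 20  # placeholder account address
url = 'http://localhost:5555/tx/user/{}/0/100'.format(address)  # assumed host:port
with urllib.request.urlopen(url) as resp:
    o = json.loads(resp.read())

print(len(base64.b64decode(o['blocktx_filter'])), o['filter_rounds'])
```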
@@ -1,39 +0,0 @@
FROM registry.gitlab.com/grassrootseconomics/cic-bancor:master-07951c84

ARG pip_extra_index_url_flag='--extra-index-url https://pip.grassrootseconomics.net:8433'

RUN apk add postgresql-dev linux-headers git
# RUN apk add linux-headers

WORKDIR /usr/src

RUN mkdir -vp cic-cache

COPY requirements.txt cic-cache/
COPY setup.* cic-cache/
COPY cic_cache/ cic-cache/cic_cache/
COPY scripts/ cic-cache/scripts/
RUN cd cic-cache && \
	pip install $pip_extra_index_url_flag .

RUN cd cic-cache && \
	pip install .[server]

COPY tests/ cic-cache/tests/
#COPY db/ cic-cache/db
#RUN apk add postgresql-client

# ini files in the config directory define the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY .config/ /usr/local/etc/cic-cache/

# for db migrations
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/


# Tracker
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
# Server
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
3616 apps/cic-cache/examples/bloom_client/package-lock.json generated
File diff suppressed because it is too large
@@ -1,40 +0,0 @@
let xmlhttprequest = require('xhr2');
let moolb = require('moolb');

let xhr = new xmlhttprequest();
xhr.responseType = 'json';
xhr.open('GET', 'http://localhost:5555/tx/0/100');
xhr.addEventListener('load', (e) => {

	d = xhr.response;

	b_one = Buffer.from(d.block_filter, 'base64');
	b_two = Buffer.from(d.blocktx_filter, 'base64');

	for (let i = 0; i < 8192; i++) {
		if (b_two[i] > 0) {
			console.debug('value on', i, b_two[i]);
		}
	}
	console.log(b_one, b_two);

	let f_block = moolb.fromBytes(b_one, d.filter_rounds);
	let f_blocktx = moolb.fromBytes(b_two, d.filter_rounds);
	let a = new ArrayBuffer(8);
	let w = new DataView(a);
	for (let i = 410000; i < 430000; i++) {
		w.setInt32(0, i);
		let r = new Uint8Array(a.slice(0, 4));
		if (f_block.check(r)) {
			for (let j = 0; j < 200; j++) {
				w = new DataView(a);
				w.setInt32(4, j);
				r = new Uint8Array(a);
				if (f_blocktx.check(r)) {
					console.log('true', i, j);
				}
			}
		}
	}
});
let r = xhr.send();
@@ -1,6 +0,0 @@
alembic==1.4.2
confini==0.3.1
uwsgi==2.0.19.1
moolb==0.1.0
cic-registry==0.3.8
SQLAlchemy==1.3.19
@@ -1,56 +0,0 @@
#!/usr/bin/python
import os
import argparse
import logging

import alembic
from alembic.config import Config as AlembicConfig
import confini

from cic_cache.db import dsn_from_config

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

# BUG: the dbdir doesn't work after script install
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')

config_dir = os.path.join('/usr/local/etc/cic-cache')

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(args.c, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))

migrations_dir = os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrations_dir):
    logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
    migrations_dir = os.path.join(args.migrations_dir, 'default')

# connect to database
dsn = dsn_from_config(config)


logg.info('using migrations dir {}'.format(migrations_dir))
logg.info('using db {}'.format(dsn))
ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', dsn)
ac.set_main_option('script_location', migrations_dir)

alembic.command.upgrade(ac, 'head')
@@ -1,55 +0,0 @@
[metadata]
name = cic-cache
version = 0.1.1
description = CIC Cache API and server
author = Louis Holbrook
author_email = dev@holbrook.no
url = https://gitlab.com/grassrootseconomics/cic-eth
keywords =
	cic
	cryptocurrency
	ethereum
classifiers =
	Programming Language :: Python :: 3
	Operating System :: OS Independent
	Development Status :: 3 - Alpha
	Environment :: No Input/Output (Daemon)
	Intended Audience :: Developers
	License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
	Topic :: Internet
	# Topic :: Blockchain :: EVM
license = GPL3
license_files =
	LICENSE.txt

[options]
python_requires = >= 3.6
packages =
	cic_cache
	cic_cache.db
	cic_cache.db.models
	cic_cache.runnable
install_requires =
	alembic==1.4.2
	web3==5.12.2
	confini==0.3.2
	cic-registry==0.3.9
	moolb==0.1.0
	SQLAlchemy==1.3.19
	psycopg2==2.8.6
tests_require =
	pytest==6.0.1
	pytest-cov==2.10.1
	pysqlite3==0.4.3
	sqlparse==0.4.1
dependency_links =
	https://pip.grassrootseconomics.net:8433/cic-registry
scripts =
	./scripts/migrate.py

[options.extras_require]
server = uWSGI==2.0.19.1

[options.entry_points]
console_scripts =
	cic-cache-tracker = cic_cache.runnable.tracker:main
@@ -1,4 +0,0 @@
from setuptools import setup

setup(
    )
@@ -1,5 +0,0 @@
pytest==6.0.1
pytest-cov==2.10.1
pytest-mock==3.3.1
pysqlite3==0.4.3
sqlparse==0.4.1
@@ -1,26 +0,0 @@
# standard imports
import os
import sys

# third-party imports
import pytest

script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
sys.path.insert(0, root_dir)

# fixtures
from test.fixtures_config import *
from test.fixtures_database import *


@pytest.fixture(scope='session')
def balances_dict_fields():
    return {
        'out_pending': 0,
        'out_synced': 1,
        'out_confirmed': 2,
        'in_pending': 3,
        'in_synced': 4,
        'in_confirmed': 5,
        }
@@ -1,20 +0,0 @@
# standard imports
import os
import logging

# third-party imports
import pytest
import confini

script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
logg = logging.getLogger(__file__)


@pytest.fixture(scope='session')
def load_config():
    config_dir = os.path.join(root_dir, '.config/test')
    conf = confini.Config(config_dir, 'CICTEST')
    conf.process()
    logg.debug('config {}'.format(conf))
    return conf
@@ -1,115 +0,0 @@
# standard imports
import os
import logging
import re

# third-party imports
import pytest
import sqlparse

# local imports
from cic_cache.db.models.base import SessionBase
from cic_cache.db import dsn_from_config

logg = logging.getLogger(__file__)


@pytest.fixture(scope='function')
def database_engine(
        load_config,
        ):
    if load_config.get('DATABASE_ENGINE') == 'sqlite':
        try:
            os.unlink(load_config.get('DATABASE_NAME'))
        except FileNotFoundError:
            pass
    dsn = dsn_from_config(load_config)
    SessionBase.connect(dsn)
    return dsn


@pytest.fixture(scope='function')
def init_database(
        load_config,
        database_engine,
        ):

    rootdir = os.path.dirname(os.path.dirname(__file__))
    schemadir = os.path.join(rootdir, 'db', load_config.get('DATABASE_DRIVER'))

    if load_config.get('DATABASE_ENGINE') == 'sqlite':
        rconn = SessionBase.engine.raw_connection()
        f = open(os.path.join(schemadir, 'db.sql'))
        s = f.read()
        f.close()
        rconn.executescript(s)

    else:
        rconn = SessionBase.engine.raw_connection()
        rcursor = rconn.cursor()

        #rcursor.execute('DROP FUNCTION IF EXISTS public.transaction_list')
        #rcursor.execute('DROP FUNCTION IF EXISTS public.balances')

        f = open(os.path.join(schemadir, 'db.sql'))
        s = f.read()
        f.close()
        r = re.compile(r'^[A-Z]', re.MULTILINE)
        for l in sqlparse.parse(s):
            strl = str(l)
            # we need to check for empty query lines, as sqlparse doesn't do that on its own (and psycopg complains when it gets them)
            if not re.search(r, strl):
                logg.warning('skipping parsed query line {}'.format(strl))
                continue
            rcursor.execute(strl)
        rconn.commit()

        rcursor.execute('SET search_path TO public')

        # this doesn't work when run separately, no idea why
        # functions have been manually added to original schema from cic-eth
        # f = open(os.path.join(schemadir, 'proc_transaction_list.sql'))
        # s = f.read()
        # f.close()
        # rcursor.execute(s)
        #
        # f = open(os.path.join(schemadir, 'proc_balances.sql'))
        # s = f.read()
        # f.close()
        # rcursor.execute(s)

        rcursor.close()

    session = SessionBase.create_session()
    yield session
    session.commit()
    session.close()


@pytest.fixture(scope='function')
def list_tokens(
        ):
    return {
        'foo': '0x' + os.urandom(20).hex(),
        'bar': '0x' + os.urandom(20).hex(),
        }


@pytest.fixture(scope='function')
def list_actors(
        ):
    return {
        'alice': '0x' + os.urandom(20).hex(),
        'bob': '0x' + os.urandom(20).hex(),
        'charlie': '0x' + os.urandom(20).hex(),
        'diane': '0x' + os.urandom(20).hex(),
        }


@pytest.fixture(scope='function')
def list_defaults(
        ):

    return {
        'block': 420000,
        }
@@ -1,73 +0,0 @@
# standard imports
import os
import datetime
import logging
import json

# third-party imports
import pytest

# local imports
from cic_cache import db
from cic_cache import BloomCache

logg = logging.getLogger()


def test_cache(
        init_database,
        list_defaults,
        list_actors,
        list_tokens,
        ):

    session = init_database

    tx_number = 13
    tx_hash_first = '0x' + os.urandom(32).hex()
    val = 15000
    nonce = 1
    dt = datetime.datetime.utcnow()
    db.add_transaction(
        session,
        tx_hash_first,
        list_defaults['block'],
        tx_number,
        list_actors['alice'],
        list_actors['bob'],
        list_tokens['foo'],
        list_tokens['foo'],
        True,
        dt.timestamp(),
        )

    tx_number = 42
    tx_hash_second = '0x' + os.urandom(32).hex()
    tx_signed_second = '0x' + os.urandom(128).hex()
    nonce = 1
    dt -= datetime.timedelta(hours=1)
    db.add_transaction(
        session,
        tx_hash_second,
        list_defaults['block'] - 1,
        tx_number,
        list_actors['diane'],
        list_actors['alice'],
        list_tokens['foo'],
        list_tokens['foo'],
        False,
        dt.timestamp(),
        )

    session.commit()

    c = BloomCache(session)
    b = c.load_transactions(0, 100)

    assert b[0] == list_defaults['block'] - 1

    c = BloomCache(session)
    b = c.load_transactions_account(list_actors['alice'], 0, 100)

    assert b[0] == list_defaults['block'] - 1
@@ -128,59 +128,57 @@ services:
    volumes:
      - contract-config:/tmp/cic/config

  cic-cache-tracker:
    build:
      context: apps/
      dockerfile: cic-cache/docker/Dockerfile
    environment:
      CIC_REGISTRY_ADDRESS: $CIC_REGISTRY_ADDRESS # supplied at contract-config after contract provisioning
      ETH_PROVIDER: ${ETH_PROVIDER:-http://eth:8545}
      BANCOR_DIR: ${BANCOR_DIR:-/usr/local/share/cic/bancor}
      DATABASE_USER: ${DATABASE_USER:-grassroots}
      DATABASE_PASSWORD: ${DATABASE_PASSWORD:-tralala} # this is set at initdb, see: postgres/initdb/create_db.sql
      DATABASE_HOST: ${DATABASE_HOST:-postgres}
      DATABASE_PORT: ${DATABASE_PORT:-5432}
      DATABASE_NAME: ${DATABASE_NAME_CIC_CACHE:-cic_cache}
      DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
      DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
      ETH_ABI_DIR: ${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi}
    deploy:
      restart_policy:
        condition: on-failure
    depends_on:
      - postgres
      - eth
    command:
      - /bin/sh
      - -c
      - |
        if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
        /usr/local/bin/cic-cache-tracker -vv
    volumes:
      - contract-config:/tmp/cic/config/:ro
    entrypoint: ["/usr/local/bin/cic-cache-tracker", "-vv"]

  # cic-cache-tracker:
  #   # image: registry.gitlab.com/grassrootseconomics/cic-cache:master-latest
  #   build: apps/cic-cache
  #   environment:
  #     CIC_REGISTRY_ADDRESS: $CIC_REGISTRY_ADDRESS # supplied at contract-config after contract provisioning
  #     ETH_PROVIDER: ${ETH_PROVIDER:-http://eth:8545}
  #     BANCOR_DIR: ${BANCOR_DIR:-/usr/local/share/cic/bancor}
  #     DATABASE_USER: ${DATABASE_USER:-grassroots}
  #     DATABASE_PASSWORD: ${DATABASE_PASSWORD:-tralala} # this is set at initdb, see: postgres/initdb/create_db.sql
  #     DATABASE_HOST: ${DATABASE_HOST:-postgres}
  #     DATABASE_PORT: ${DATABASE_PORT:-5432}
  #     DATABASE_NAME: ${DATABASE_NAME_CIC_CACHE:-cic_cache}
  #     DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
  #     DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
  #     ETH_ABI_DIR: ${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi}
  #   deploy:
  #     restart_policy:
  #       condition: on-failure
  #   depends_on:
  #     - postgres
  #     - eth
  #   command:
  #     - /bin/sh
  #     - -c
  #     - |
  #       if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
  #       /usr/local/bin/cic-cache-tracker -vv
  #   volumes:
  #     - contract-config:/tmp/cic/config/:ro
  #   entrypoint: ["/usr/local/bin/cic-cache-tracker", "-vv"]
  #   command: "/usr/local/bin/cic-cache-tracker -vv"

  cic-cache-server:
    build:
      context: apps/
      dockerfile: cic-cache/docker/Dockerfile
    environment:
      DATABASE_USER: $DATABASE_USER
      DATABASE_HOST: $DATABASE_HOST
      DATABASE_PORT: $DATABASE_PORT
      DATABASE_PASSWORD: $DATABASE_PASSWORD
      DATABASE_NAME: $DATABASE_NAME_CIC_CACHE
      PGPASSWORD: $DATABASE_PASSWORD
      SERVER_PORT: 80
    ports:
      - ${HTTP_PORT_CIC_CACHE}:80
    depends_on:
      - postgres
    deploy:
      restart_policy:
        condition: on-failure
    command: "/root/start_uwsgi.sh"

  # cic-cache-server:
  #   image: grassrootseconomics:cic-cache-uwsgi
  #   environment:
  #     DATABASE_USER: $DATABASE_USER
  #     DATABASE_HOST: $DATABASE_HOST
  #     DATABASE_PORT: $DATABASE_PORT
  #     DATABASE_PASSWORD: $DATABASE_PASSWORD
  #     DATABASE_NAME: $DATABASE_NAME_CIC_CACHE
  #     PGPASSWORD: $DATABASE_PASSWORD
  #     SERVER_PORT: 80
  #   ports:
  #     - ${HTTP_PORT_CIC_CACHE}:80
  #   depends_on:
  #     - postgres
  #   deploy:
  #     restart_policy:
  #       condition: on-failure
  #   command: "/root/start_uwsgi.sh"

  cic-eth-tasker:
    # image: grassrootseconomics:cic-eth-service