add cache back in
This commit is contained in:
parent 39433d67da
commit 492faa87e5
apps/cic-cache/.config/bancor.ini (new file)
@@ -0,0 +1,2 @@
[bancor]
dir =
apps/cic-cache/.config/cic.ini (new file)
@@ -0,0 +1,2 @@
[cic]
registry_address =
apps/cic-cache/.config/database.ini (new file)
@@ -0,0 +1,8 @@
[database]
NAME=cic-eth
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
apps/cic-cache/.config/eth.ini (new file)
@@ -0,0 +1,6 @@
[eth]
provider = ws://localhost:8545
#http_provider = http://localhost:8545
#provider = http://localhost:8545
gas_provider_address =
#chain_id =
apps/cic-cache/.config/test/bancor.ini (new file)
@@ -0,0 +1,2 @@
[bancor]
dir =
apps/cic-cache/.config/test/cic.ini (new file)
@@ -0,0 +1,2 @@
[cic]
registry_address =
apps/cic-cache/.config/test/database.ini (new file)
@@ -0,0 +1,8 @@
[database]
NAME=cic-cache-test
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=sqlite
DRIVER=pysqlite
apps/cic-cache/.config/test/eth.ini (new file)
@@ -0,0 +1,5 @@
[eth]
#ws_provider = ws://localhost:8546
#http_provider = http://localhost:8545
provider = http://localhost:8545
#chain_id =
apps/cic-cache/.coveragerc (new file)
@@ -0,0 +1,5 @@
[report]
omit =
	.venv/*
	scripts/*
	cic_cache/db/postgres/*
apps/cic-cache/.envrc_example (new file)
@@ -0,0 +1,7 @@
set -a
CICTEST_DATABASE_ENGINE=postgresql
CICTEST_DATABASE_DRIVER=psycopg2
CICTEST_DATABASE_HOST=localhost
CICTEST_DATABASE_PORT=5432
CICTEST_DATABASE_NAME=cic-eth-test
set +a
apps/cic-cache/.gitignore (new file, vendored)
@@ -0,0 +1,8 @@
.envrc
.envrc_dev
.venv
__pycache__
*.pyc
_build
doc/**/*.png
doc/**/html
apps/cic-cache/.gitlab-ci.yml (new file)
@@ -0,0 +1,42 @@
image: docker:19.03.13

variables:
  # docker host
  DOCKER_HOST: tcp://docker:2376
  # container, thanks to volume mount from config.toml
  DOCKER_TLS_CERTDIR: "/certs"
  # These are usually specified by the entrypoint, however the
  # Kubernetes executor doesn't run entrypoints
  # https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4125
  DOCKER_TLS_VERIFY: 1
  DOCKER_CERT_PATH: "$DOCKER_TLS_CERTDIR/client"

services:
  - docker:19.03.13-dind

before_script:
  - docker info

build_merge_request:
  stage: build
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: always
  script:
    - docker build -t $CI_PROJECT_PATH_SLUG:$CI_COMMIT_SHORT_SHA .

build_image:
  stage: build
  variables:
    IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
  before_script:
    - docker login -u $CI_DEPLOY_USER -p $CI_DEPLOY_PASSWORD $CI_REGISTRY
  script:
    - echo "$CI_REGISTRY_PASSWORD" | docker login -u "$CI_REGISTRY_USER" $CI_REGISTRY --password-stdin
    - docker build -t $IMAGE_TAG .
    - docker push $IMAGE_TAG
    - docker tag $IMAGE_TAG $LATEST_TAG
    - docker push $LATEST_TAG
  only:
    - master
apps/cic-cache/CHANGELOG (new file)
@@ -0,0 +1,11 @@
- 0.1.1
	* Add missing modules to setup
- 0.1.0
	* Remove old APIs
	* Add bloom filter output APIs for all txs and per-account txs
- 0.0.2
	* UWSGI server endpoint example
	* OpenAPI spec
	* stored procedures, test fixture for database schema
- 0.0.1
	* Add json translators of transaction_list and balances stored procedure queries
apps/cic-cache/Dockerfile (new file)
@@ -0,0 +1,39 @@
FROM registry.gitlab.com/grassrootseconomics/cic-bancor:master-07951c84

ARG pip_extra_index_url_flag='--extra-index-url https://pip.grassrootseconomics.net:8433'

RUN apk add postgresql-dev linux-headers git
# RUN apk add linux-headers

WORKDIR /usr/src

RUN mkdir -vp cic-cache

COPY requirements.txt cic-cache/
COPY setup.* cic-cache/
COPY cic_cache/ cic-cache/cic_cache/
COPY scripts/ cic-cache/scripts/
RUN cd cic-cache && \
	pip install $pip_extra_index_url_flag .

RUN cd cic-cache && \
	pip install .[server]

COPY tests/ cic-cache/tests/
#COPY db/ cic-cache/db
#RUN apk add postgresql-client

# ini files in the config directory define the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY .config/ /usr/local/etc/cic-cache/

# for db migrations
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/


# Tracker
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
# Server
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
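As the Dockerfile comments note, each ini section/key pair surfaces as an environment variable. A minimal sketch of the override mechanism, assuming the .config/ layout above (the CICTEST prefix mirrors .envrc_example; exact precedence is confini's, not verified here):

# Sketch: [database] NAME in database.ini maps to DATABASE_NAME, and a
# prefixed environment variable such as CICTEST_DATABASE_NAME overrides it.
import confini

config = confini.Config('/usr/local/etc/cic-cache', 'CICTEST')
config.process()
print(config.get('DATABASE_NAME'))  # 'cic-eth' from the ini, unless overridden in the environment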
apps/cic-cache/README.md (new file, empty)
apps/cic-cache/cic_cache/__init__.py (new file)
@@ -0,0 +1 @@
from .cache import BloomCache
apps/cic-cache/cic_cache/cache.py (new file)
@@ -0,0 +1,89 @@
# standard imports
import logging

# third-party imports
import moolb

# local imports
from cic_cache.db import list_transactions_mined
from cic_cache.db import list_transactions_account_mined

logg = logging.getLogger()


class BloomCache:

    def __init__(self, session):
        self.session = session


    @staticmethod
    def __get_filter_size(n):
        n = 8192 * 8
        logg.warning('filter size hardcoded to {}'.format(n))
        return n


    def load_transactions(self, offset, limit):
        """Retrieves a list of transactions from cache and creates a bloom filter pointing to blocks and transactions.

        Block and transaction numbers are serialized as 32-bit big-endian numbers. The input to the second bloom filter is the concatenation of the serialized block number and transaction index.

        For example, if the block number is 13 and the transaction index is 42, the inputs are:

        block filter:    0x0d000000
        block+tx filter: 0x0d0000002a000000

        :param offset: Offset in data set to return transactions from
        :type offset: int
        :param limit: Max number of transactions to retrieve
        :type limit: int
        :return: Lowest block, highest block, bloom filter for blocks, bloom filter for blocks|tx
        :rtype: tuple
        """
        rows = list_transactions_mined(self.session, offset, limit)

        f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        highest_block = -1
        lowest_block = -1
        for r in rows:
            if highest_block == -1:
                highest_block = r[0]
            lowest_block = r[0]
            block = r[0].to_bytes(4, byteorder='big')
            tx = r[1].to_bytes(4, byteorder='big')
            f_block.add(block)
            f_blocktx.add(block + tx)
            logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
        return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)


    def load_transactions_account(self, address, offset, limit):
        """Same as load_transactions(...), but only retrieves transactions where the specified account address is sender or recipient.

        :param address: Address to retrieve transactions for.
        :type address: str, 0x-hex
        :param offset: Offset in data set to return transactions from
        :type offset: int
        :param limit: Max number of transactions to retrieve
        :type limit: int
        :return: Lowest block, highest block, bloom filter for blocks, bloom filter for blocks|tx
        :rtype: tuple
        """
        rows = list_transactions_account_mined(self.session, address, offset, limit)

        f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        highest_block = -1
        lowest_block = -1
        for r in rows:
            if highest_block == -1:
                highest_block = r[0]
            lowest_block = r[0]
            block = r[0].to_bytes(4, byteorder='big')
            tx = r[1].to_bytes(4, byteorder='big')
            f_block.add(block)
            f_blocktx.add(block + tx)
            logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
        return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
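A stdlib-only sketch reproducing the serialization the docstring describes, for the block 13 / tx index 42 example:

block_number = 13
tx_index = 42
block = block_number.to_bytes(4, byteorder='big')
blocktx = block + tx_index.to_bytes(4, byteorder='big')
print(block.hex())    # 0d000000 -> input to the block filter
print(blocktx.hex())  # 0d0000002a000000 -> input to the block+tx filter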
apps/cic-cache/cic_cache/db/__init__.py (new file)
@@ -0,0 +1,35 @@
# standard imports
import logging

# local imports
from .list import list_transactions_mined
from .list import list_transactions_account_mined
from .list import add_transaction

logg = logging.getLogger()


def dsn_from_config(config):
    scheme = config.get('DATABASE_ENGINE')
    if config.get('DATABASE_DRIVER') != None:
        scheme += '+{}'.format(config.get('DATABASE_DRIVER'))

    dsn = ''
    if config.get('DATABASE_ENGINE') == 'sqlite':
        dsn = '{}:///{}'.format(
                scheme,
                config.get('DATABASE_NAME'),
            )
    else:
        dsn = '{}://{}:{}@{}:{}/{}'.format(
                scheme,
                config.get('DATABASE_USER'),
                config.get('DATABASE_PASSWORD'),
                config.get('DATABASE_HOST'),
                config.get('DATABASE_PORT'),
                config.get('DATABASE_NAME'),
            )
    logg.debug('parsed dsn from config: {}'.format(dsn))
    return dsn
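A usage sketch for dsn_from_config; a plain dict stands in for the confini Config object here, since only get() is called:

from cic_cache.db import dsn_from_config

config = {
    'DATABASE_ENGINE': 'postgresql',
    'DATABASE_DRIVER': 'psycopg2',
    'DATABASE_USER': 'postgres',
    'DATABASE_PASSWORD': '',
    'DATABASE_HOST': 'localhost',
    'DATABASE_PORT': 5432,
    'DATABASE_NAME': 'cic-eth',
}
print(dsn_from_config(config))
# postgresql+psycopg2://postgres:@localhost:5432/cic-eth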
apps/cic-cache/cic_cache/db/list.py (new file)
@@ -0,0 +1,56 @@
# standard imports
import logging
import datetime

# third-party imports
from cic_cache.db.models.base import SessionBase

logg = logging.getLogger()


def list_transactions_mined(session, offset, limit):
    """Executes db query to return all confirmed transactions according to the specified offset and limit.

    :param offset: Offset in data set to return transactions from
    :type offset: int
    :param limit: Max number of transactions to retrieve
    :type limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """
    s = "SELECT block_number, tx_index FROM tx ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(limit, offset)
    r = session.execute(s)
    return r


def list_transactions_account_mined(session, address, offset, limit):
    """Same as list_transactions_mined(...), but only retrieves transactions where the specified account address is sender or recipient.

    :param address: Address to retrieve transactions for.
    :type address: str, 0x-hex
    :param offset: Offset in data set to return transactions from
    :type offset: int
    :param limit: Max number of transactions to retrieve
    :type limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """
    s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(address, address, limit, offset)
    r = session.execute(s)
    return r


def add_transaction(session, tx_hash, block_number, tx_index, sender, receiver, source_token, destination_token, success, timestamp):
    date_block = datetime.datetime.fromtimestamp(timestamp)
    s = "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, source_token, destination_token, success, date_block) VALUES ('{}', {}, {}, '{}', '{}', '{}', '{}', {}, '{}')".format(
            tx_hash,
            block_number,
            tx_index,
            sender,
            receiver,
            source_token,
            destination_token,
            success,
            date_block,
        )
    session.execute(s)
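The queries above interpolate caller-supplied values straight into SQL strings; a hedged alternative sketch (not the module's code) using SQLAlchemy bound parameters for the account query:

from sqlalchemy import text

def list_transactions_account_mined_bound(session, address, offset, limit):
    # bound parameters let the driver escape values instead of str.format()
    s = text(
        "SELECT block_number, tx_index FROM tx "
        "WHERE sender = :address OR recipient = :address "
        "ORDER BY block_number DESC, tx_index DESC LIMIT :limit OFFSET :offset"
    )
    return session.execute(s, {'address': address, 'limit': limit, 'offset': offset})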
apps/cic-cache/cic_cache/db/migrations/default/README (new file)
@@ -0,0 +1 @@
Generic single-database configuration.
apps/cic-cache/cic_cache/db/migrations/default/alembic.ini (new file)
@@ -0,0 +1,86 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = .

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to ./versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat ./versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

#sqlalchemy.url = driver://user:pass@localhost/dbname
sqlalchemy.url = postgresql+psycopg2://postgres@localhost:5432/cic-cache


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
apps/cic-cache/cic_cache/db/migrations/default/env.py (new file)
@@ -0,0 +1,77 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
apps/cic-cache/cic_cache/db/migrations/default/script.py.mako (new file; filename assumed, header missing from diff)
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
apps/cic-cache/cic_cache/db/migrations/default/versions/63b629f14a85_base_tables.py (new file; filename assumed, header missing from diff)
@@ -0,0 +1,44 @@
"""Base tables

Revision ID: 63b629f14a85
Revises:
Create Date: 2020-12-04 08:16:00.412189

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '63b629f14a85'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        'tx',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('date_registered', sa.DateTime, nullable=False),
        sa.Column('block_number', sa.Integer, nullable=False),
        sa.Column('tx_index', sa.Integer, nullable=False),
        sa.Column('tx_hash', sa.String(66), nullable=False),
        sa.Column('sender', sa.String(42), nullable=False),
        sa.Column('recipient', sa.String(42), nullable=False),
        sa.Column('source_token', sa.String(42), nullable=False),
        sa.Column('destination_token', sa.String(42), nullable=False),
        sa.Column('success', sa.Boolean, nullable=False),
        sa.Column('date_block', sa.DateTime, nullable=False),
    )
    op.create_table(
        'tx_sync',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('tx', sa.String(66), nullable=False),
    )

    op.execute("INSERT INTO tx_sync (tx) VALUES('0x0000000000000000000000000000000000000000000000000000000000000000');")


def downgrade():
    op.drop_table('tx_sync')
    op.drop_table('tx')
apps/cic-cache/cic_cache/db/models/__init__.py (new file, empty)
apps/cic-cache/cic_cache/db/models/base.py (new file)
@@ -0,0 +1,48 @@
# third-party imports
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

Model = declarative_base(name='Model')


class SessionBase(Model):
    __abstract__ = True

    id = Column(Integer, primary_key=True)

    engine = None
    query = None
    sessionmaker = None


    @staticmethod
    def create_session():
        #SessionBase.session = session()
        #return SessionBase.session
        return SessionBase.sessionmaker()


    @staticmethod
    def _set_engine(engine):
        SessionBase.engine = engine
        SessionBase.sessionmaker = sessionmaker(bind=SessionBase.engine)


    @staticmethod
    def build():
        Model.metadata.create_all(bind=SessionBase.engine)


    @staticmethod
    def connect(dsn):
        e = create_engine(dsn)
        SessionBase._set_engine(e)


    @staticmethod
    def disconnect():
        SessionBase.engine.dispose()
        SessionBase.session = None
        SessionBase.engine = None
apps/cic-cache/cic_cache/runnable/server.py (new file)
@@ -0,0 +1,141 @@
# standard imports
import os
import re
import logging
import argparse
import json
import base64

# third-party imports
import confini

# local imports
from cic_cache import BloomCache
from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_eth', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')

config_dir = os.path.join('/usr/local/etc/cic-cache')

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(args.c, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))

dsn = dsn_from_config(config)
SessionBase.connect(dsn)

re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'

DEFAULT_LIMIT = 100


def process_transactions_account_bloom(session, env):
    r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
    if not r:
        return None

    address = r[1]
    if r[2] == None:
        address = '0x' + address
    offset = 0
    if r.lastindex > 2:
        offset = r[3]
    limit = DEFAULT_LIMIT
    if r.lastindex > 3:
        limit = r[4]

    c = BloomCache(session)
    (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)

    o = {
        'alg': 'sha256',
        'low': lowest_block,
        'high': highest_block,
        'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
        'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
        'filter_rounds': 3,
    }

    j = json.dumps(o)

    return ('application/json', j.encode('utf-8'),)


def process_transactions_all_bloom(session, env):
    r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
    if not r:
        return None

    offset = 0
    if r.lastindex > 0:
        offset = r[1]
    limit = DEFAULT_LIMIT
    if r.lastindex > 1:
        limit = r[2]

    c = BloomCache(session)
    (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)

    o = {
        'alg': 'sha256',
        'low': lowest_block,
        'high': highest_block,
        'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
        'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
        'filter_rounds': 3,
    }

    j = json.dumps(o)

    return ('application/json', j.encode('utf-8'),)


# uwsgi application
def application(env, start_response):

    headers = []
    content = b''

    session = SessionBase.create_session()
    for handler in [
            process_transactions_all_bloom,
            process_transactions_account_bloom,
            ]:
        r = handler(session, env)
        if r != None:
            (mime_type, content) = r
            break
    session.close()

    headers.append(('Content-Length', str(len(content)),))
    headers.append(('Access-Control-Allow-Origin', '*',))

    if len(content) == 0:
        headers.append(('Content-Type', 'text/plain; charset=UTF-8',))
        start_response('404 Looked everywhere, sorry', headers)
    else:
        headers.append(('Content-Type', mime_type,))
        start_response('200 OK', headers)

    return [content]
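A client-side sketch decoding the JSON the handlers above return (stdlib only; the localhost:5555 endpoint is assumed, matching the bloom_client example further down):

import base64
import json
import urllib.request

with urllib.request.urlopen('http://localhost:5555/tx/0/100') as resp:
    o = json.loads(resp.read())

block_filter = base64.b64decode(o['block_filter'])      # raw bloom filter bytes
blocktx_filter = base64.b64decode(o['blocktx_filter'])
print(o['low'], o['high'], o['alg'], o['filter_rounds'])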
apps/cic-cache/cic_cache/runnable/tracker.py (new file)
@@ -0,0 +1,284 @@
# standard imports
import sys
import os
import argparse
import logging
import time
import enum
import re

# third-party imports
import confini
from cic_registry import CICRegistry
from cic_registry.bancor import BancorRegistry
from cic_registry.token import Token
from cic_registry.error import UnknownContractError
from web3.exceptions import BlockNotFound, TransactionNotFound
from websockets.exceptions import ConnectionClosedError
from requests.exceptions import ConnectionError
import web3
from web3 import HTTPProvider, WebsocketProvider

# local imports
from cic_cache import db
from cic_cache.db.models.base import SessionBase

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)

log_topics = {
    'transfer': '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
    'convert': '0x7154b38b5dd31bb3122436a96d4e09aba5b323ae1fd580025fab55074334c095',
    }

config_dir = os.path.join('/usr/local/etc/cic-cache')

argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
args = argparser.parse_args(sys.argv[1:])

config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)

if args.vv == True:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v == True:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(config_dir, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))

# connect to database
dsn = db.dsn_from_config(config)
SessionBase.connect(dsn)


re_websocket = re.compile('^wss?://')
re_http = re.compile('^https?://')
blockchain_provider = config.get('ETH_PROVIDER')
if re.match(re_websocket, blockchain_provider) != None:
    blockchain_provider = WebsocketProvider(blockchain_provider)
elif re.match(re_http, blockchain_provider) != None:
    blockchain_provider = HTTPProvider(blockchain_provider)
else:
    raise ValueError('unknown provider url {}'.format(blockchain_provider))


def web3_constructor():
    w3 = web3.Web3(blockchain_provider)
    return (blockchain_provider, w3)


class RunStateEnum(enum.IntEnum):
    INIT = 0
    RUN = 1
    TERMINATE = 9


class Tracker:

    def __init__(self):
        self.block_height = 0
        self.tx_height = 0
        self.state = RunStateEnum.INIT


    def refresh_registry(self, w3):
        cr = CICRegistry.get_contract(CICRegistry.bancor_chain_spec, 'ConverterRegistry')
        f = cr.function('getConvertibleTokens')
        anchors = f().call()
        # TODO: if there are other token sources, this number may not match anymore. The cache count method should be moved to the bancorregistry object instead
        r = CICRegistry.get_chain_registry(CICRegistry.bancor_chain_spec)
        #logg.debug('anchors {} {}'.format(anchors, ContractRegistry.cache_token_count()))
        if len(anchors) != r.cache_token_count():
            logg.debug('token count mismatch, scanning')

            for a in anchors:
                if ContractRegistry.get_address(a) == None:
                    abi = CICRegistry.abi('ERC20Token')
                    #abi = ContractRegistry.contracts['ERC20Token'].contract.abi
                    c = w3.eth.contract(address=a, abi=abi)
                    t = ContractRegistry.add_token(a, c)
                    logg.info('new token {} at {}'.format(t.symbol(), t.address))


    def __process_tx(self, w3, session, t, r, l, b):
        token_value = int(l.data, 16)
        token_sender = l.topics[1][-20:].hex()
        token_recipient = l.topics[2][-20:].hex()

        ts = ContractRegistry.get_address(t.address)
        logg.info('add token transfer {} value {} from {} to {}'.format(
            ts.symbol(),
            token_value,
            token_sender,
            token_recipient,
            )
        )

        logg.debug('block {}'.format(b))
        db.add_transaction(
            session,
            r.transactionHash.hex(),
            r.blockNumber,
            r.transactionIndex,
            w3.toChecksumAddress(token_sender),
            w3.toChecksumAddress(token_recipient),
            t.address,
            t.address,
            r.status == 1,
            b.timestamp,
            )
        session.flush()


    # TODO: simplify/ split up and/or comment, function is too long
    def __process_convert(self, w3, session, t, r, l, b):
        token_source = l.topics[2][-20:].hex()
        token_source = w3.toChecksumAddress(token_source)
        token_destination = l.topics[3][-20:].hex()
        token_destination = w3.toChecksumAddress(token_destination)
        data_noox = l.data[2:]
        d = data_noox[:64]
        token_from_value = int(d, 16)
        d = data_noox[64:128]
        token_to_value = int(d, 16)
        token_trader = '0x' + data_noox[192-40:]

        ts = ContractRegistry.get_address(token_source)
        if ts == None:
            ts = ContractRegistry.reserves[token_source]
        td = ContractRegistry.get_address(token_destination)
        if td == None:
            td = ContractRegistry.reserves[token_destination]
        logg.info('add token convert {} -> {} value {} -> {} trader {}'.format(
            ts.symbol(),
            td.symbol(),
            token_from_value,
            token_to_value,
            token_trader,
            )
        )

        db.add_transaction(
            session,
            r.transactionHash.hex(),
            r.blockNumber,
            r.transactionIndex,
            w3.toChecksumAddress(token_trader),
            w3.toChecksumAddress(token_trader),
            token_source,
            token_destination,
            r.status == 1,
            b.timestamp,
            )
        session.flush()


    def process(self, w3, session, block):
        self.refresh_registry(w3)
        tx_count = w3.eth.getBlockTransactionCount(block.hash)
        b = w3.eth.getBlock(block.hash)
        for i in range(self.tx_height, tx_count):
            tx = w3.eth.getTransactionByBlock(block.hash, i)
            t = None
            try:
                t = CICRegistry.get_address(CICRegistry.bancor_chain_spec, tx.to)
            except UnknownContractError:
                logg.debug('block {} tx {} not our contract, skipping'.format(block, i))
                continue
            logg.debug('block tx {} {}'.format(block.number, i))
            if t != None and isinstance(t, Token):
                r = w3.eth.getTransactionReceipt(tx.hash)
                for l in r.logs:
                    logg.info('{} token log {} {}'.format(tx.hash.hex(), l.logIndex, l.topics[0].hex()))
                    if l.topics[0].hex() == log_topics['transfer']:
                        self.__process_tx(w3, session, t, r, l, b)

            elif tx.to == CICRegistry.get_contract(CICRegistry.bancor_chain_spec, 'BancorNetwork').address:
                r = w3.eth.getTransactionReceipt(tx.hash)
                for l in r.logs:
                    logg.info('{} bancornetwork log {} {}'.format(tx.hash.hex(), l.logIndex, l.topics[0].hex()))
                    if l.topics[0].hex() == log_topics['convert']:
                        self.__process_convert(w3, session, t, r, l, b)

            session.execute("UPDATE tx_sync SET tx = '{}'".format(tx.hash.hex()))
            session.commit()
            self.tx_height += 1


    def __get_next_retry(self, backoff=False):
        return 1


    def loop(self, bancor_registry):
        logg.info('starting at block {} tx index {}'.format(self.block_height, self.tx_height))
        self.state = RunStateEnum.RUN
        while self.state == RunStateEnum.RUN:
            (provider, w3) = web3_constructor()
            session = SessionBase.create_session()
            try:
                block = w3.eth.getBlock(self.block_height)
                self.process(w3, session, block)
                self.block_height += 1
                self.tx_height = 0
            except BlockNotFound as e:
                logg.debug('no block {} yet, zZzZ...'.format(self.block_height))
                time.sleep(self.__get_next_retry())
            except ConnectionClosedError as e:
                logg.info('connection gone, retrying')
                time.sleep(self.__get_next_retry(True))
            except OSError as e:
                logg.error('cannot connect {}'.format(e))
                time.sleep(self.__get_next_retry(True))
            except Exception as e:
                session.close()
                raise(e)
            session.close()


    def load(self, w3):
        session = SessionBase.create_session()
        r = session.execute('SELECT tx FROM tx_sync').first()
        if r != None:
            if r[0] == '0x{0:0{1}X}'.format(0, 64):
                logg.debug('last tx was zero-address, starting from scratch')
                session.close()
                return
            t = w3.eth.getTransaction(r[0])

            self.block_height = t.blockNumber
            self.tx_height = t.transactionIndex+1
            c = w3.eth.getBlockTransactionCount(t.blockHash.hex())
            logg.debug('last tx processed {} index {} (max index {})'.format(t.blockNumber, t.transactionIndex, c-1))
            if c == self.tx_height:
                self.block_height += 1
                self.tx_height = 0
        session.close()


def main():
    (provider, w3) = web3_constructor()
    CICRegistry.finalize(w3, config.get('CIC_REGISTRY_ADDRESS'))
    bancor_registry_contract = CICRegistry.get_contract(CICRegistry.bancor_chain_spec, 'BancorRegistry')
    bancor_chain_registry = CICRegistry.get_chain_registry(CICRegistry.bancor_chain_spec)
    bancor_registry = BancorRegistry(w3, bancor_chain_registry, bancor_registry_contract.address(), config.get('BANCOR_DIR'))
    bancor_registry.load()

    #bancor.load(w3, config.get('BANCOR_REGISTRY_ADDRESS'), config.get('BANCOR_DIR'))

    t = Tracker()
    t.load(w3)
    t.loop(bancor_registry)


if __name__ == '__main__':
    main()
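The 'transfer' entry in log_topics above is the keccak256 hash of the canonical ERC20 event signature; a quick verification sketch with web3 (already a dependency of this module):

import web3

# topic 0 of an ERC20 Transfer log is keccak256 of the event signature
topic = web3.Web3.keccak(text='Transfer(address,address,uint256)').hex()
print(topic)
# 0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef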
apps/cic-cache/db/initdb_files/create_all_db.sql (new file)
@@ -0,0 +1,5 @@
CREATE DATABASE "cic-cache";
CREATE DATABASE "cic-eth";
CREATE DATABASE "cic-notify";
CREATE DATABASE "cic-meta";
CREATE DATABASE "cic-signer";
apps/cic-cache/db/psycopg2/db.sql (new file)
@@ -0,0 +1,20 @@
CREATE TABLE tx (
	id SERIAL PRIMARY KEY,
	date_registered TIMESTAMP NOT NULL default CURRENT_TIMESTAMP,
	block_number INTEGER NOT NULL,
	tx_index INTEGER NOT NULL,
	tx_hash VARCHAR(66) NOT NULL,
	sender VARCHAR(42) NOT NULL,
	recipient VARCHAR(42) NOT NULL,
	source_token VARCHAR(42) NOT NULL,
	destination_token VARCHAR(42) NOT NULL,
	success BOOLEAN NOT NULL,
	date_block TIMESTAMP NOT NULL
);

CREATE TABLE tx_sync (
	id SERIAL PRIMARY KEY,
	tx VARCHAR(66) NOT NULL
);

INSERT INTO tx_sync (tx) VALUES('0x0000000000000000000000000000000000000000000000000000000000000000');
apps/cic-cache/db/pysqlite/db.sql (new file)
@@ -0,0 +1,21 @@
CREATE TABLE tx (
	id SERIAL PRIMARY KEY,
	date_registered DATETIME NOT NULL default CURRENT_DATE,
	block_number INTEGER NOT NULL,
	tx_index INTEGER NOT NULL,
	tx_hash VARCHAR(66) NOT NULL,
	sender VARCHAR(42) NOT NULL,
	recipient VARCHAR(42) NOT NULL,
	source_token VARCHAR(42) NOT NULL,
	destination_token VARCHAR(42) NOT NULL,
	success BOOLEAN NOT NULL,
	date_block DATETIME NOT NULL,
	CHECK (success IN (0, 1))
);

CREATE TABLE tx_sync (
	id SERIAL PRIMARY KEY,
	tx VARCHAR(66) NOT NULL
);

INSERT INTO tx_sync (tx) VALUES('0x0000000000000000000000000000000000000000000000000000000000000000');
apps/cic-cache/doc/openapi/server.yml (new file)
@@ -0,0 +1,102 @@
openapi: "3.0.3"
info:
  title: Grassroots Economics CIC Cache
  description: Cache of processed transaction data from Ethereum blockchain and worker queues
  termsOfService: bzz://grassrootseconomics.eth/terms
  contact:
    name: Grassroots Economics
    url: https://www.grassrootseconomics.org
    email: will@grassecon.org
  license:
    name: GPLv3
  version: 0.1.0

paths:
  /tx/{offset}/{limit}:
    description: Bloom filter for batch of latest transactions
    get:
      tags:
        - transactions
      description:
        Retrieve transactions
      operationId: tx.get
      responses:
        200:
          description: Transaction query successful.
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/BlocksBloom"

    parameters:
      - name: offset
        in: path
        schema:
          type: integer
          format: int32
      - name: limit
        in: path
        schema:
          type: integer
          format: int32

  /tx/{address}/{offset}/{limit}:
    description: Bloom filter for batch of latest transactions by account
    get:
      tags:
        - transactions
      description:
        Retrieve transactions
      operationId: tx.get
      responses:
        200:
          description: Transaction query successful.
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/BlocksBloom"

    parameters:
      - name: address
        in: path
        required: true
        schema:
          type: string
      - name: offset
        in: path
        schema:
          type: integer
          format: int32
      - name: limit
        in: path
        schema:
          type: integer
          format: int32

components:
  schemas:
    BlocksBloom:
      type: object
      properties:
        low:
          type: integer
          format: int32
          description: The lowest block number included in the filter
        high:
          type: integer
          format: int32
          description: The highest block number included in the filter
        block_filter:
          type: string
          format: byte
          description: Block number filter
        blocktx_filter:
          type: string
          format: byte
          description: Block and tx index filter
        alg:
          type: string
          description: Hashing algorithm (currently only using sha256)
        filter_rounds:
          type: integer
          format: int32
          description: Number of hash rounds used to create the filter
apps/cic-cache/docker-compose.yaml (new file)
@@ -0,0 +1,84 @@
version: "3"
volumes:
  data-volume: {} # an empty dir that can be erased by dropping the volume: docker-compose down -v

services:

  postgres:
    image: postgres:12.5-alpine
    container_name: cic_postgres
    environment:
      POSTGRES_USER: ${DATABASE_USER:-postgres}
      POSTGRES_PASSWORD: ${DATABASE_PASSWORD:-password}
      POSTGRES_DB: postgres
    ports:
      - 63432:5432
    restart: always
    volumes:
      - ./db/initdb_files/create_all_db.sql:/docker-entrypoint-initdb.d/1-create_all_db.sql # note these run in order
      - data-volume:/var/lib/postgresql/data

  db-migration:
    build:
      context: .
    environment:
      ETH_PROVIDER: ${ETH_PROVIDER:-wss://bloxberg-ws.dev.grassrootseconomics.net/}
      # CIC_REGISTRY_ADDRESS: ${CIC_REGISTRY_ADDRESS:?err} # TODO are there any public addresses we can use for a default?
      CIC_REGISTRY_ADDRESS: $CIC_REGISTRY_ADDRESS
      BANCOR_DIR: $BANCOR_DIR # is this required?
      DATABASE_HOST: postgres # because it's set in the postgres service block, as is the port
      DATABASE_PORT: 5432
      DATABASE_USER: ${DATABASE_USER:-postgres}
      DATABASE_PASSWORD: ${DATABASE_PASSWORD:-password}
      DATABASE_NAME: ${DATABASE_NAME_CIC_CACHE:-cic-cache}
      DATABASE_ENGINE: postgres
      DATABASE_DRIVER: psycopg2
    command: [ "/usr/local/bin/wait-for-it/wait-for-it.sh", "postgres:5432", "--",
      "migrate.py", "-c", "/usr/local/etc/cic-cache", "--migrations-dir",
      "/usr/local/share/cic-cache/alembic", "-vv" ]
    depends_on:
      - postgres

  cic-cache-tracker:
    build:
      context: .
    # https://docs.docker.com/compose/compose-file/#variable-substitution
    environment:
      ETH_PROVIDER: ${ETH_PROVIDER:-wss://bloxberg-ws.dev.grassrootseconomics.net/}
      # CIC_REGISTRY_ADDRESS: ${CIC_REGISTRY_ADDRESS:?err} # TODO are there any public addresses we can use for a default?
      CIC_REGISTRY_ADDRESS: $CIC_REGISTRY_ADDRESS
      BANCOR_DIR: $BANCOR_DIR # is this required?
      DATABASE_HOST: postgres # because it's set in the postgres service block, as is the port
      DATABASE_PORT: 5432
      DATABASE_USER: ${DATABASE_USER:-postgres}
      DATABASE_PASSWORD: ${DATABASE_PASSWORD:-password}
      DATABASE_NAME: ${DATABASE_NAME_CIC_CACHE:-"cic-cache"}
      DATABASE_ENGINE: postgres
      DATABASE_DRIVER: psycopg2
    # deploy:
    #   restart_policy:
    #     condition: on-failure
    entrypoint: ["/usr/local/bin/cic-cache-tracker", "-vv"]
    depends_on:
      - postgres
      - db-migration

  cic-cache-server:
    build:
      context: .
    environment:
      DATABASE_HOST: ${DATABASE_HOST:-postgres}
      DATABASE_PORT: 5432
      DATABASE_USER: ${DATABASE_USER:-postgres}
      DATABASE_PASSWORD: ${DATABASE_PASSWORD:-password}
      DATABASE_NAME: ${DATABASE_NAME_CIC_CACHE:-"cic-cache"}
      SERVER_PORT: 80
    ports:
      - 63313:80
    # deploy:
    #   restart_policy:
    #     condition: on-failure
    entrypoint: [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
    depends_on:
      - postgres
      - db-migration
apps/cic-cache/examples/bloom_client/package-lock.json (new file, generated, 3616 lines)
File diff suppressed because it is too large
apps/cic-cache/examples/bloom_client/parse.js (new file)
@@ -0,0 +1,40 @@
let xmlhttprequest = require('xhr2');
let moolb = require('moolb');

let xhr = new xmlhttprequest();
xhr.responseType = 'json';
xhr.open('GET', 'http://localhost:5555/tx/0/100');
xhr.addEventListener('load', (e) => {

	d = xhr.response;

	b_one = Buffer.from(d.block_filter, 'base64');
	b_two = Buffer.from(d.blocktx_filter, 'base64');

	for (let i = 0; i < 8192; i++) {
		if (b_two[i] > 0) {
			console.debug('value on', i, b_two[i]);
		}
	}
	console.log(b_one, b_two);

	let f_block = moolb.fromBytes(b_one, d.filter_rounds);
	let f_blocktx = moolb.fromBytes(b_two, d.filter_rounds);
	let a = new ArrayBuffer(8);
	let w = new DataView(a);
	for (let i = 410000; i < 430000; i++) {
		w.setInt32(0, i);
		let r = new Uint8Array(a.slice(0, 4));
		if (f_block.check(r)) {
			for (let j = 0; j < 200; j++) {
				w = new DataView(a);
				w.setInt32(4, j);
				r = new Uint8Array(a);
				if (f_blocktx.check(r)) {
					console.log('true', i, j);
				}
			}
		}
	}
});
let r = xhr.send();
apps/cic-cache/requirements.txt (new file)
@@ -0,0 +1,6 @@
alembic==1.4.2
confini==0.3.1
uwsgi==2.0.19.1
moolb==0.1.0
cic-registry==0.3.8
SQLAlchemy==1.3.19
apps/cic-cache/scripts/migrate.py (new file)
@@ -0,0 +1,56 @@
#!/usr/bin/python
import os
import argparse
import logging

import alembic
from alembic.config import Config as AlembicConfig
import confini

from cic_cache.db import dsn_from_config

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

# BUG: the dbdir doesn't work after script install
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')

config_dir = os.path.join('/usr/local/etc/cic-cache')

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(args.c, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))

migrations_dir = os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrations_dir):
    logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
    migrations_dir = os.path.join(args.migrations_dir, 'default')

# connect to database
dsn = dsn_from_config(config)


logg.info('using migrations dir {}'.format(migrations_dir))
logg.info('using db {}'.format(dsn))
ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', dsn)
ac.set_main_option('script_location', migrations_dir)

alembic.command.upgrade(ac, 'head')
55
apps/cic-cache/setup.cfg
Normal file
@ -0,0 +1,55 @@
[metadata]
name = cic-cache
version = 0.1.1
description = CIC Cache API and server
author = Louis Holbrook
author_email = dev@holbrook.no
url = https://gitlab.com/grassrootseconomics/cic-eth
keywords =
    cic
    cryptocurrency
    ethereum
classifiers =
    Programming Language :: Python :: 3
    Operating System :: OS Independent
    Development Status :: 3 - Alpha
    Environment :: No Input/Output (Daemon)
    Intended Audience :: Developers
    License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
    Topic :: Internet
    # Topic :: Blockchain :: EVM
license = GPL3
license_files =
    LICENSE.txt

[options]
python_requires = >= 3.6
packages =
    cic_cache
    cic_cache.db
    cic_cache.db.models
    cic_cache.runnable
install_requires =
    alembic==1.4.2
    web3==5.12.2
    confini==0.3.2
    cic-registry==0.3.9
    moolb==0.1.0
    SQLAlchemy==1.3.19
    psycopg2==2.8.6
tests_require =
    pytest==6.0.1
    pytest-cov==2.10.1
    pysqlite3==0.4.3
    sqlparse==0.4.1
dependency_links =
    https://pip.grassrootseconomics.net:8433/cic-registry
scripts =
    ./scripts/migrate.py

[options.extras_require]
server = uWSGI==2.0.19.1

[options.entry_points]
console_scripts =
    cic-cache-tracker = cic_cache.runnable.tracker:main
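The console_scripts entry point expects cic_cache/runnable/tracker.py to expose a zero-argument main() callable; setuptools generates a cic-cache-tracker wrapper that imports the module and calls it. The tracker module itself is not part of this diff, so the following skeleton is purely illustrative of that contract:

# hypothetical skeleton of cic_cache/runnable/tracker.py;
# the real module is not shown in this commit
import sys


def main():
    # the generated wrapper calls this with no arguments,
    # so any argument parsing has to read sys.argv internally
    print('tracker started with args {}'.format(sys.argv[1:]))
    return 0


if __name__ == '__main__':
    sys.exit(main())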
4
apps/cic-cache/setup.py
Normal file
@ -0,0 +1,4 @@
from setuptools import setup

setup(
)
5
apps/cic-cache/test_requirements.txt
Normal file
@ -0,0 +1,5 @@
pytest==6.0.1
pytest-cov==2.10.1
pytest-mock==3.3.1
pysqlite3==0.4.3
sqlparse==0.4.1
26
apps/cic-cache/tests/conftest.py
Normal file
@ -0,0 +1,26 @@
# standard imports
import os
import sys

# third-party imports
import pytest

script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
sys.path.insert(0, root_dir)

# fixtures (the directory is named "tests"; importing "test" would hit the stdlib package)
from tests.fixtures_config import *
from tests.fixtures_database import *


@pytest.fixture(scope='session')
def balances_dict_fields():
    return {
        'out_pending': 0,
        'out_synced': 1,
        'out_confirmed': 2,
        'in_pending': 3,
        'in_synced': 4,
        'in_confirmed': 5,
        }
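Star-importing the fixture modules from conftest.py is what makes load_config and init_database visible to every test in the directory: pytest collects fixtures from conftest.py and plugins, not from arbitrary modules. A minimal sketch of a test consuming the session-scoped dict fixture defined above; the test file name is an assumption:

# tests/test_fields.py (hypothetical) -- pytest injects the fixture by
# matching the parameter name against fixtures collected via conftest.py
def test_balance_field_order(balances_dict_fields):
    # six counters, indexed 0..5 in declaration order
    assert sorted(balances_dict_fields.values()) == list(range(6))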
20
apps/cic-cache/tests/fixtures_config.py
Normal file
@ -0,0 +1,20 @@
# standard imports
import os
import logging

# third-party imports
import pytest
import confini

script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
logg = logging.getLogger(__file__)


@pytest.fixture(scope='session')
def load_config():
    config_dir = os.path.join(root_dir, '.config/test')
    conf = confini.Config(config_dir, 'CICTEST')
    conf.process()
    logg.debug('config {}'.format(conf))
    return conf
115
apps/cic-cache/tests/fixtures_database.py
Normal file
@ -0,0 +1,115 @@
# standard imports
import os
import logging
import re

# third-party imports
import pytest
import sqlparse

# local imports
from cic_cache.db.models.base import SessionBase
from cic_cache.db import dsn_from_config

logg = logging.getLogger(__file__)


@pytest.fixture(scope='function')
def database_engine(
        load_config,
        ):
    if load_config.get('DATABASE_ENGINE') == 'sqlite':
        try:
            os.unlink(load_config.get('DATABASE_NAME'))
        except FileNotFoundError:
            pass
    dsn = dsn_from_config(load_config)
    SessionBase.connect(dsn)
    return dsn


@pytest.fixture(scope='function')
def init_database(
        load_config,
        database_engine,
        ):

    rootdir = os.path.dirname(os.path.dirname(__file__))
    schemadir = os.path.join(rootdir, 'db', load_config.get('DATABASE_DRIVER'))

    if load_config.get('DATABASE_ENGINE') == 'sqlite':
        rconn = SessionBase.engine.raw_connection()
        f = open(os.path.join(schemadir, 'db.sql'))
        s = f.read()
        f.close()
        rconn.executescript(s)

    else:
        rconn = SessionBase.engine.raw_connection()
        rcursor = rconn.cursor()

        #rcursor.execute('DROP FUNCTION IF EXISTS public.transaction_list')
        #rcursor.execute('DROP FUNCTION IF EXISTS public.balances')

        f = open(os.path.join(schemadir, 'db.sql'))
        s = f.read()
        f.close()
        r = re.compile(r'^[A-Z]', re.MULTILINE)
        for l in sqlparse.parse(s):
            strl = str(l)
            # we need to check for empty query lines, as sqlparse doesn't do that on its own (and psycopg complains when it gets them)
            if not re.search(r, strl):
                logg.warning('skipping parsed query line {}'.format(strl))
                continue
            rcursor.execute(strl)
        rconn.commit()

        rcursor.execute('SET search_path TO public')

        # this doesn't work when run separately, no idea why
        # functions have been manually added to original schema from cic-eth
        # f = open(os.path.join(schemadir, 'proc_transaction_list.sql'))
        # s = f.read()
        # f.close()
        # rcursor.execute(s)
        #
        # f = open(os.path.join(schemadir, 'proc_balances.sql'))
        # s = f.read()
        # f.close()
        # rcursor.execute(s)

        rcursor.close()

    session = SessionBase.create_session()
    yield session
    session.commit()
    session.close()


@pytest.fixture(scope='function')
def list_tokens(
        ):
    return {
        'foo': '0x' + os.urandom(20).hex(),
        'bar': '0x' + os.urandom(20).hex(),
        }


@pytest.fixture(scope='function')
def list_actors(
        ):
    return {
        'alice': '0x' + os.urandom(20).hex(),
        'bob': '0x' + os.urandom(20).hex(),
        'charlie': '0x' + os.urandom(20).hex(),
        'diane': '0x' + os.urandom(20).hex(),
        }


@pytest.fixture(scope='function')
def list_defaults(
        ):

    return {
        'block': 420000,
        }
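The postgres branch above has to split db.sql into individual statements itself, because a DBAPI cursor.execute() takes one statement at a time; the ^[A-Z] filter then drops the whitespace- or comment-only fragments that sqlparse still yields (for example the run after the final semicolon). The technique in isolation, with made-up schema text standing in for db.sql:

import re

import sqlparse

# made-up schema text, for illustration only
s = """
-- comment, no statement yet

CREATE TABLE tx (id INTEGER PRIMARY KEY);
INSERT INTO tx (id) VALUES (1);

"""

r = re.compile(r'^[A-Z]', re.MULTILINE)
for stmt in sqlparse.parse(s):
    strstmt = str(stmt)
    # only fragments containing a line that starts with a capital letter
    # hold an executable statement
    if not re.search(r, strstmt):
        continue
    print(repr(strstmt))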
73
apps/cic-cache/tests/test_cache.py
Normal file
@ -0,0 +1,73 @@
# standard imports
import os
import datetime
import logging
import json

# third-party imports
import pytest

# local imports
from cic_cache import db
from cic_cache import BloomCache

logg = logging.getLogger()


def test_cache(
        init_database,
        list_defaults,
        list_actors,
        list_tokens,
        ):

    session = init_database

    tx_number = 13
    tx_hash_first = '0x' + os.urandom(32).hex()
    val = 15000
    nonce = 1
    dt = datetime.datetime.utcnow()
    db.add_transaction(
        session,
        tx_hash_first,
        list_defaults['block'],
        tx_number,
        list_actors['alice'],
        list_actors['bob'],
        list_tokens['foo'],
        list_tokens['foo'],
        True,
        dt.timestamp(),
        )

    tx_number = 42
    tx_hash_second = '0x' + os.urandom(32).hex()
    tx_signed_second = '0x' + os.urandom(128).hex()
    nonce = 1
    dt -= datetime.timedelta(hours=1)
    db.add_transaction(
        session,
        tx_hash_second,
        list_defaults['block']-1,
        tx_number,
        list_actors['diane'],
        list_actors['alice'],
        list_tokens['foo'],
        list_tokens['foo'],
        False,
        dt.timestamp(),
        )

    session.commit()

    c = BloomCache(session)
    b = c.load_transactions(0, 100)

    assert b[0] == list_defaults['block'] - 1

    c = BloomCache(session)
    # capture the account-scoped result so the assertion checks it,
    # rather than the stale value from the previous load
    b = c.load_transactions_account(list_actors['alice'], 0, 100)

    assert b[0] == list_defaults['block'] - 1
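The assertions only check the lowest block of the returned range; the bloom filter content itself is probabilistic, which is why a cache like this can return false positives but never false negatives. A generic, self-contained illustration of that property, using plain hashlib rather than the moolb library this package actually depends on:

import hashlib


class TinyBloom:
    # toy bloom filter: m bits, k hash rounds
    def __init__(self, m=8192, k=3):
        self.m = m
        self.k = k
        self.bits = bytearray(m // 8)

    def _positions(self, data):
        for i in range(self.k):
            h = hashlib.sha256(bytes([i]) + data).digest()
            yield int.from_bytes(h, 'big') % self.m

    def add(self, data):
        for p in self._positions(data):
            self.bits[p // 8] |= 1 << (p % 8)

    def check(self, data):
        return all(self.bits[p // 8] & (1 << (p % 8)) for p in self._positions(data))


f = TinyBloom()
f.add(b'known tx hash')
assert f.check(b'known tx hash')  # members are never reported absent
# absent items are *probably* rejected, but false positives are possible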