Compare commits: lash/expan ... bvander/sw

41 commits:
894d8bcd21, c4cb095a29, 05b8bbbbca, 1ce32fbbe0, 3fd5e77e2c, e27a49ef33,
fffb2bc3f4, 8910fb0759, c84239c820, 452047b900, b8be457c41, 0ec9813e5f,
defa7797dc, bb3d38a1f9, 3be1c1b33d, d6c763f2d7, b7942ddcfa, 8de5dc1540,
fad0a4b580, 0672a17d2e, f764b73f66, 806b82504f, ac76e14129, 1c78f4d6d6,
0d6e228f8a, 7a3cb7ab75, 992c7b4022, f19173001e, f82bb4515d, 24e6db7d87,
ecdfb9bc5a, 30415ac997, d5a8b77349, ed2521b582, 395930106a, ee1452e530,
8cdaf9f28a, 402b968b6d, aa13517534, 884b18f2f1, 494a8f3e88
.env (new file, 37 lines)
@@ -0,0 +1,37 @@
DOMAIN=localhost

STACK_NAME=cic-net

TRAEFIK_PUBLIC_NETWORK=traefik-public
TRAEFIK_TAG=cic.net
TRAEFIK_PUBLIC_TAG=traefik-public

FRONTEND_ENV=dev

# Flower
FLOWER_BASIC_AUTH=admin:changethis

# Postgres
DATABASE_HOST=postgres
DATABASE_PORT=5432
DATABASE_ENGINE=postgresql
DATABASE_DRIVER=psycopg2
DATABASE_USER=postgres

# Redis
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_DB=0

# Celery Broker
CELERY_BROKER_URL=redis://redis:6379
CELERY_RESULT_URL=redis://redis:6379

# Blockchain node
ETH_PROVIDER=http://eth:8545
CIC_CHAIN_SPEC=evm:bloxberg:8996

# PgAdmin
PGADMIN_LISTEN_PORT=5050
PGADMIN_DEFAULT_EMAIL=admin@cic.net
PGADMIN_DEFAULT_PASSWORD=changethis
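The new .env is consumed as plain environment variables by docker-compose and the services. A minimal sketch (illustration only; the helper names and the database-name default are assumptions, not project code) of assembling connection strings from these values:

```python
# Illustration only: build connection strings from the .env variables above.
import os

def postgres_dsn(db_name='cic-cache'):
    # db_name default is an assumption; the .env only sets host/port/engine/driver/user
    return '{}+{}://{}@{}:{}/{}'.format(
        os.environ.get('DATABASE_ENGINE', 'postgresql'),
        os.environ.get('DATABASE_DRIVER', 'psycopg2'),
        os.environ.get('DATABASE_USER', 'postgres'),
        os.environ.get('DATABASE_HOST', 'localhost'),
        os.environ.get('DATABASE_PORT', '5432'),
        db_name,
    )

def celery_urls():
    return (
        os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379'),
        os.environ.get('CELERY_RESULT_URL', 'redis://localhost:6379'),
    )

if __name__ == '__main__':
    print(postgres_dsn())
    print(celery_urls())
```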
.gitignore (vendored, +2 lines)
@@ -14,3 +14,5 @@ build/
**/.venv
.idea
**/.vim
docker-stack.yml
.npm/
@@ -1,14 +1,99 @@
include:
  - local: 'ci_templates/.cic-template.yml'
  - local: 'apps/contract-migration/.gitlab-ci.yml'
#  - local: 'ci_templates/.cic-template.yml'
#  - local: 'apps/contract-migration/.gitlab-ci.yml'
  - local: 'apps/cic-eth/.gitlab-ci.yml'
  - local: 'apps/cic-ussd/.gitlab-ci.yml'
  - local: 'apps/cic-notify/.gitlab-ci.yml'
  - local: 'apps/cic-meta/.gitlab-ci.yml'
  - local: 'apps/cic-cache/.gitlab-ci.yml'
  - local: 'apps/data-seeding/.gitlab-ci.yml'
#  - local: 'apps/data-seeding/.gitlab-ci.yml'

image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/docker-with-compose:latest

stages:
  - build
  - test
  - release
  - deploy

variables:
  DOCKER_BUILDKIT: "1"
  COMPOSE_DOCKER_CLI_BUILD: "1"
  MR_IMAGE_TAG: mr-$CI_COMMIT_SHORT_SHA

# todo you can probably just build the single image w/o docker-compose
build-merge-request:
  before_script:
    - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
  stage: build
  tags:
    - integration
  variables:
    CI_DEBUG_TRACE: "true"
  script:
    - TAG=$MR_IMAGE_TAG FRONTEND_ENV=dev sh ./scripts/build-push.sh
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: always

build-staging:
  before_script:
    - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
  tags:
    - integration
    #- blocal
  variables:
    CI_DEBUG_TRACE: "true"
  stage: build
  script:
    - TAG=stag FRONTEND_ENV=staging sh ./scripts/build-push.sh
  only:
    - staging

deploy-staging:
  before_script:
    - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
    - pip install docker-auto-labels
  tags:
    - integration
  stage: deploy
  script:
    - >
      DOMAIN=stag.grassrootseconomics.net
      TRAEFIK_TAG=grassrootseconomics.net
      STACK_NAME=stag-cic-net
      TAG=stag
      sh ./scripts/deploy.sh
  environment:
    name: staging
    url: https://stag.grassrootseconomics.net
  only:
    - staging

#build-prod:
#  stage: build
#  script:
#    - TAG=prod FRONTEND_ENV=production sh ./scripts/build-push.sh
#  only:
#    - production
#  tags:
#    - build
#    - test
#
#deploy-prod:
#  stage: deploy
#  script:
#    - >
#      DOMAIN=demo1.com
#      TRAEFIK_TAG=demo1.com
#      STACK_NAME=demo1-com
#      TAG=prod
#      sh ./scripts/deploy.sh
#  environment:
#    name: production
#    url: https://demo1.com
#  only:
#    - production
#  tags:
#    - swarm
#    - prod
README.md (106 lines changed)
@@ -1,42 +1,104 @@
# cic-internal-integration

## Getting started
## Backend Requirements

## Make some keys
* [Docker](https://www.docker.com/).
* [Docker Compose](https://docs.docker.com/compose/install/).

## Backend local development

* Start the stack with Docker Compose:

```bash
docker-compose up -d
```

* Now you can open your browser and interact with these URLs:

Frontend (CICADA), built with Docker, with routes handled based on the path: http://localhost

PGAdmin, PostgreSQL web administration: http://localhost:5050

Flower, administration of Celery tasks: http://localhost:5555

Traefik UI, to see how the routes are being handled by the proxy: http://localhost:8090

**Note**: The first time you start your stack it might take a minute to be ready, while the backend waits for the database and configures everything. You can check the logs to monitor it.

To check the logs, run:

```bash
docker-compose logs
```

To check the logs of a specific service, add the name of the service, e.g.:

```bash
docker-compose logs backend
```

If your Docker is not running in `localhost` (the URLs above wouldn't work) check the sections below on **Development with Docker Toolbox** and **Development with a custom IP**.

#### Deploy the stack locally

If you want to run the docker stack locally on swarm:

```
docker build -t bloxie . && docker run -v "$(pwd)/keys:/root/keys" --rm -it -t bloxie account new --chain /root/bloxberg.json --keys-path /root/keys
docker-compose -f docker-compose.yml -f docker-compose.override.yml config > docker-stack.yml
```

```
docker node update z1ehkrw1mvqlxc2udwt4xpype --label-add cic-net.app-db-data=true
docker stack deploy -c docker-stack.yml cic-net
```

### Prepare the repo
## Backend local development, additional details

This is stuff we need to put in a makefile, but for now...
**fill me in**

File mounts and permissions need to be set:
```
chmod -R 755 scripts/initdb apps/cic-meta/scripts/initdb
```
### Docker Compose Override

start cluster
```
docker-compose up
```
During development, you can change Docker Compose settings that will only affect the local development environment, in the file `docker-compose.override.yml`.

The changes to that file only affect the local development environment, not the production environment. So, you can add "temporary" changes that help the development workflow.

For example, the directory with the backend code is mounted as a Docker "host volume", mapping the code you change live to the directory inside the container. That allows you to test your changes right away, without having to build the Docker image again. It should only be done during development; for production, you should build the Docker image with a recent version of the backend code. But during development, it allows you to iterate very fast.

There is also a command override that runs `/start-reload.sh` (included in the base image) instead of the default `/start.sh` (also included in the base image). It starts a single server process (instead of multiple, as would be for production) and reloads the process whenever the code changes. Have in mind that if you have a syntax error and save the Python file, it will break and exit, and the container will stop. After that, you can restart the container by fixing the error and running again:

```console
$ docker-compose up -d
```

stop cluster
```
docker-compose down
```
There is also a commented out `command` override, you can uncomment it and comment the default one. It makes the backend container run a process that does "nothing", but keeps the container alive. That allows you to get inside your running container and execute commands inside, for example a Python interpreter to test installed dependencies, or start the development server that reloads when it detects changes, or start a Jupyter Notebook session.

To get inside the container with a `bash` session you can start the stack with:

```console
$ docker-compose up -d
```

delete data
```
docker-compose down -v
```
and then `exec` inside the running container:

```console
$ docker-compose exec backend bash
```

rebuild an image
```
docker-compose up --build <service_name>
```
You should see an output like:

```console
root@7f2607af31c3:/app#
```

#### Test running stack

If your stack is already up and you just want to run the tests, you can use:

```bash
docker-compose exec data-seeding /script/run_ussd_user_imports.sh
```

Deployment variables are written to service-configs/.env after everything is up.
@@ -1,4 +1,5 @@
.git
.cache
.dot
**/doc
**/doc
**/node_modules/
@@ -1,34 +0,0 @@
# The solc image messes up the alpine environment, so we have to go all over again
FROM python:3.8.6-slim-buster

LABEL authors="Louis Holbrook <dev@holbrook.no> 0826EDA1702D1E87C6E2875121D2E7BB88C2A746"
LABEL spdx-license-identifier="GPL-3.0-or-later"
LABEL description="Base layer for buiding development images for the cic component suite"

RUN apt-get update && \
    apt-get install -y git gcc g++ libpq-dev && \
    apt-get install -y vim gawk jq telnet openssl iputils-ping curl wget gnupg socat bash procps make python2 postgresql-client


RUN echo installing nodejs tooling

COPY ./dev/nvm.sh /root/

# Install nvm with node and npm
# https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
ENV NVM_DIR /root/.nvm
ENV NODE_VERSION 15.3.0
ENV BANCOR_NODE_VERSION 10.16.0

RUN wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash \
    && . $NVM_DIR/nvm.sh \
    && nvm install $NODE_VERSION \
    && nvm alias default $NODE_VERSION \
    && nvm use $NODE_VERSION \
    # So many ridiculously stupid issues with node in docker that take oceans of absolutely wasted time to resolve
    # owner of these files is "1001" by default - wtf
    && chown -R root:root "$NVM_DIR/versions/node/v$NODE_VERSION"

ENV NODE_PATH $NVM_DIR/versions/node//v$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node//v$NODE_VERSION/bin:$PATH
@@ -1 +0,0 @@
## this is an example base image if we wanted one for all the other apps. Its just OS level things
@@ -4,3 +4,4 @@ omit =
    scripts/*
    cic_cache/db/migrations/*
    cic_cache/version.py
    cic_cache/cli
@@ -1,52 +1,41 @@
.cic_cache_variables:
  variables:
    APP_NAME: cic-cache
    DOCKERFILE_PATH: docker/Dockerfile_ci
    CONTEXT: apps/$APP_NAME

build-mr-cic-cache:
  extends:
    - .py_build_merge_request
    - .cic_cache_variables
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-cache/**/*
      when: always
#build-mr-cic-cache:
#  extends:
#    - .py_build_merge_request
#    - .cic_cache_variables
#  rules:
#    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
#      changes:
#        - apps/cic-cache/**/*
#      when: always

test-mr-cic-cache:
  stage: test
  tags:
    - integration
  extends:
    - .cic_cache_variables
    - .cic_cache_variables
  cache:
    key:
      files:
        - test_requirements.txt
    paths:
      - /root/.cache/pip
  image: $MR_IMAGE_TAG
    key:
      files:
        - test_requirements.txt
    paths:
      - /root/.cache/pip
  image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/$APP_NAME:$MR_IMAGE_TAG
  script:
    - cd apps/$APP_NAME/
    - >
      pip install --extra-index-url https://pip.grassrootseconomics.net:8433
      --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
      -r test_requirements.txt
    - export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests
  needs: ["build-mr-cic-cache"]
    - cd apps/$APP_NAME/
    - >
      pip install --extra-index-url https://pip.grassrootseconomics.net:8433
      --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
      -r test_requirements.txt
    - export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests
  allow_failure: true
  needs: ["build-merge-request"]
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/$APP_NAME/**/*
      when: always

build-push-cic-cache:
  extends:
    - .py_build_push
    - .cic_cache_variables
  rules:
    - if: $CI_COMMIT_BRANCH == "master"
      changes:
        - apps/cic-cache/**/*
      when: always
apps/cic-cache/MANIFEST.in (new file, 1 line)
@@ -0,0 +1 @@
include *requirements.txt cic_cache/data/config/*
@@ -55,15 +55,37 @@ class Api:
                queue=callback_queue,
                )

    def list(self, offset, limit, address=None):
    def list(self, offset=0, limit=100, address=None, oldest=False):
        s = celery.signature(
                'cic_cache.tasks.tx.tx_filter',
                [
                    0,
                    100,
                    offset,
                    limit,
                    address,
                    oldest,
                    ],
                queue=None
                queue=self.queue,
                )
        if self.callback_param != None:
            s.link(self.callback_success).on_error(self.callback_error)

        t = s.apply_async()

        return t


    def list_content(self, offset=0, limit=100, address=None, block_offset=None, block_limit=None, oldest=False):
        s = celery.signature(
                'cic_cache.tasks.tx.tx_filter_content',
                [
                    offset,
                    limit,
                    address,
                    block_offset,
                    block_limit,
                    oldest,
                    ],
                queue=self.queue,
                )
        if self.callback_param != None:
            s.link(self.callback_success).on_error(self.callback_error)
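The reworked `list` call and the new `list_content` call above only build celery signatures; a minimal usage sketch follows. The import path and the `Api` constructor arguments are assumptions — only the method bodies appear in this hunk.

```python
# Hypothetical usage of the Api methods shown above; constructor and import path assumed.
from cic_cache.api import Api

api = Api(queue='cic-cache')

# newest-first page of 50 transactions, returned as bloom filters
t = api.list(offset=0, limit=50)

# oldest-first transaction data restricted to a block range
t2 = api.list_content(block_offset=1000, block_limit=2000, oldest=True)

# both wrap celery AsyncResult handles; block on them when no callback is wired up
print(t.get())
print(t2.get())
```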
@@ -10,12 +10,16 @@ from cic_cache.db.list import (
    list_transactions_mined,
    list_transactions_account_mined,
    list_transactions_mined_with_data,
    list_transactions_mined_with_data_index,
    list_transactions_account_mined_with_data_index,
    list_transactions_account_mined_with_data,
    )

logg = logging.getLogger()


DEFAULT_FILTER_SIZE = 8192 * 8
DEFAULT_LIMIT = 100

class Cache:

@@ -32,7 +36,7 @@ class BloomCache(Cache):
        return n


    def load_transactions(self, offset, limit):
    def load_transactions(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
        """Retrieves a list of transactions from cache and creates a bloom filter pointing to blocks and transactions.

        Block and transaction numbers are serialized as 32-bit big-endian numbers. The input to the second bloom filter is the concatenation of the serialized block number and transaction index.
@@ -49,7 +53,7 @@ class BloomCache(Cache):
        :return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
        :rtype: tuple
        """
        rows = list_transactions_mined(self.session, offset, limit)
        rows = list_transactions_mined(self.session, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)

        f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
@@ -58,7 +62,12 @@ class BloomCache(Cache):
        for r in rows:
            if highest_block == -1:
                highest_block = r[0]
                lowest_block = r[0]
                lowest_block = r[0]
            else:
                if oldest:
                    highest_block = r[0]
                else:
                    lowest_block = r[0]
            block = r[0].to_bytes(4, byteorder='big')
            tx = r[1].to_bytes(4, byteorder='big')
            f_block.add(block)
@@ -67,7 +76,7 @@ class BloomCache(Cache):
        return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)


    def load_transactions_account(self, address, offset, limit):
    def load_transactions_account(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
        """Same as load_transactions(...), but only retrieves transactions where the specified account address is sender or recipient.

        :param address: Address to retrieve transactions for.
@@ -79,7 +88,7 @@ class BloomCache(Cache):
        :return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
        :rtype: tuple
        """
        rows = list_transactions_account_mined(self.session, address, offset, limit)
        rows = list_transactions_account_mined(self.session, address, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)

        f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
@@ -88,7 +97,12 @@ class BloomCache(Cache):
        for r in rows:
            if highest_block == -1:
                highest_block = r[0]
                lowest_block = r[0]
                lowest_block = r[0]
            else:
                if oldest:
                    highest_block = r[0]
                else:
                    lowest_block = r[0]
            block = r[0].to_bytes(4, byteorder='big')
            tx = r[1].to_bytes(4, byteorder='big')
            f_block.add(block)
@@ -99,8 +113,21 @@ class BloomCache(Cache):

class DataCache(Cache):

    def load_transactions_with_data(self, offset, end):
        rows = list_transactions_mined_with_data(self.session, offset, end)
    def load_transactions_with_data(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
        if limit == 0:
            limit = DEFAULT_LIMIT
        rows = list_transactions_mined_with_data(self.session, offset, limit, block_offset, block_limit, oldest=oldest)
        return self.__process_rows(rows, oldest)


    def load_transactions_account_with_data(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
        if limit == 0:
            limit = DEFAULT_LIMIT
        rows = list_transactions_account_mined_with_data(self.session, address, offset, limit, block_offset, block_limit, oldest=oldest)
        return self.__process_rows(rows, oldest)


    def __process_rows(self, rows, oldest):
        tx_cache = []
        highest_block = -1;
        lowest_block = -1;
@@ -108,7 +135,12 @@ class DataCache(Cache):
        for r in rows:
            if highest_block == -1:
                highest_block = r['block_number']
                lowest_block = r['block_number']
                lowest_block = r['block_number']
            else:
                if oldest:
                    highest_block = r['block_number']
                else:
                    lowest_block = r['block_number']
            tx_type = 'unknown'

            if r['value'] != None:
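The repeated `highest_block`/`lowest_block` bookkeeping above is easiest to see in isolation: rows arrive newest-first by default and oldest-first when `oldest=True`, and after the first row only the moving end of the range is updated. A standalone sketch of that logic:

```python
# Illustration (not from the diff) of the block range bookkeeping used in
# BloomCache and DataCache above.
def block_range(rows, oldest=False):
    highest_block = -1
    lowest_block = -1
    for block_number in rows:
        if highest_block == -1:
            # first row sets both ends of the range
            highest_block = block_number
            lowest_block = block_number
        elif oldest:
            # rows ordered ascending: the top of the range keeps moving up
            highest_block = block_number
        else:
            # rows ordered descending: the bottom of the range keeps moving down
            lowest_block = block_number
    return (lowest_block, highest_block)

# newest-first rows: 120, 119, 118 -> (118, 120)
assert block_range([120, 119, 118]) == (118, 120)
# oldest-first rows: 118, 119, 120 -> (118, 120)
assert block_range([118, 119, 120], oldest=True) == (118, 120)
```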
apps/cic-cache/cic_cache/cli/__init__.py (new file, 15 lines)
@@ -0,0 +1,15 @@
# local imports
from .base import *
from .chain import (
        EthChainInterface,
        chain_interface,
        )
from .rpc import RPC
from .arg import ArgumentParser
from .config import Config
from .celery import CeleryApp
from .registry import (
        connect_registry,
        connect_token_registry,
        connect_declarator,
        )
apps/cic-cache/cic_cache/cli/arg.py (new file, 20 lines)
@@ -0,0 +1,20 @@
# external imports
from chainlib.eth.cli import ArgumentParser as BaseArgumentParser

# local imports
from .base import (
        CICFlag,
        Flag,
        )


class ArgumentParser(BaseArgumentParser):

    def process_local_flags(self, local_arg_flags):
        if local_arg_flags & CICFlag.CELERY:
            self.add_argument('-q', '--celery-queue', dest='celery_queue', type=str, default='cic-cache', help='Task queue')
        if local_arg_flags & CICFlag.SYNCER:
            self.add_argument('--offset', type=int, default=0, help='Start block height for initial history sync')
            self.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
        if local_arg_flags & CICFlag.CHAIN:
            self.add_argument('-r', '--registry-address', type=str, dest='registry_address', help='CIC registry contract address')
apps/cic-cache/cic_cache/cli/base.py (new file, 31 lines)
@@ -0,0 +1,31 @@
# standard imports
import enum

# external imports
from chainlib.eth.cli import (
        argflag_std_read,
        argflag_std_write,
        argflag_std_base,
        Flag,
        )

class CICFlag(enum.IntEnum):

    # celery - nibble 1
    CELERY = 1

    # redis - nibble 2
    # REDIS = 16
    # REDIS_CALLBACK = 32

    # chain - nibble 3
    CHAIN = 256

    # sync - nibble 4
    SYNCER = 4096


argflag_local_task = CICFlag.CELERY
#argflag_local_taskcallback = argflag_local_task | CICFlag.REDIS | CICFlag.REDIS_CALLBACK
argflag_local_chain = CICFlag.CHAIN
argflag_local_sync = CICFlag.SYNCER | CICFlag.CHAIN
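The flag values are nibble-aligned so they can be OR'ed together and tested independently, as `ArgumentParser.process_local_flags` above does. A quick illustration (assuming `CICFlag` and the `argflag_local_*` constants are re-exported by `cic_cache.cli` via `from .base import *`):

```python
# Illustration: combining and testing the nibble-aligned flags.
import cic_cache.cli

local_arg_flags = cic_cache.cli.argflag_local_task | cic_cache.cli.argflag_local_sync
# CELERY (1) | CHAIN (256) | SYNCER (4096) == 0x1101
assert local_arg_flags & cic_cache.cli.CICFlag.CELERY
assert local_arg_flags & cic_cache.cli.CICFlag.CHAIN
assert local_arg_flags & cic_cache.cli.CICFlag.SYNCER
```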
apps/cic-cache/cic_cache/cli/celery.py (new file, 24 lines)
@@ -0,0 +1,24 @@
# standard imports
import logging

# external imports
import celery

logg = logging.getLogger(__name__)


class CeleryApp:

    @classmethod
    def from_config(cls, config):
        backend_url = config.get('CELERY_RESULT_URL')
        broker_url = config.get('CELERY_BROKER_URL')
        celery_app = None
        if backend_url != None:
            celery_app = celery.Celery(broker=broker_url, backend=backend_url)
            logg.info('creating celery app on {} with backend on {}'.format(broker_url, backend_url))
        else:
            celery_app = celery.Celery(broker=broker_url)
            logg.info('creating celery app without results backend on {}'.format(broker_url))

        return celery_app
apps/cic-cache/cic_cache/cli/chain.py (new file, 21 lines)
@@ -0,0 +1,21 @@
# external imports
from chainlib.eth.block import (
        block_by_number,
        Block,
        )
from chainlib.eth.tx import (
        receipt,
        Tx,
        )
from chainlib.interface import ChainInterface


class EthChainInterface(ChainInterface):

    def __init__(self):
        self._tx_receipt = receipt
        self._block_by_number = block_by_number
        self._block_from_src = Block.from_src
        self._src_normalize = Tx.src_normalize

chain_interface = EthChainInterface()
apps/cic-cache/cic_cache/cli/config.py (new file, 63 lines)
@@ -0,0 +1,63 @@
# standard imports
import os
import logging

# external imports
from chainlib.eth.cli import (
        Config as BaseConfig,
        Flag,
        )

# local imports
from .base import CICFlag

script_dir = os.path.dirname(os.path.realpath(__file__))

logg = logging.getLogger(__name__)


class Config(BaseConfig):

    local_base_config_dir = os.path.join(script_dir, '..', 'data', 'config')

    @classmethod
    def from_args(cls, args, arg_flags, local_arg_flags, extra_args={}, default_config_dir=None, base_config_dir=None, default_fee_limit=None):
        expanded_base_config_dir = [cls.local_base_config_dir]
        if base_config_dir != None:
            if isinstance(base_config_dir, str):
                base_config_dir = [base_config_dir]
            for d in base_config_dir:
                expanded_base_config_dir.append(d)
        config = BaseConfig.from_args(args, arg_flags, extra_args=extra_args, default_config_dir=default_config_dir, base_config_dir=expanded_base_config_dir, load_callback=None)

        local_args_override = {}
        # if local_arg_flags & CICFlag.REDIS:
        #     local_args_override['REDIS_HOST'] = getattr(args, 'redis_host')
        #     local_args_override['REDIS_PORT'] = getattr(args, 'redis_port')
        #     local_args_override['REDIS_DB'] = getattr(args, 'redis_db')
        #     local_args_override['REDIS_TIMEOUT'] = getattr(args, 'redis_timeout')

        if local_arg_flags & CICFlag.CHAIN:
            local_args_override['CIC_REGISTRY_ADDRESS'] = getattr(args, 'registry_address')

        if local_arg_flags & CICFlag.CELERY:
            local_args_override['CELERY_QUEUE'] = getattr(args, 'celery_queue')

        if local_arg_flags & CICFlag.SYNCER:
            local_args_override['SYNCER_OFFSET'] = getattr(args, 'offset')
            local_args_override['SYNCER_NO_HISTORY'] = getattr(args, 'no_history')

        config.dict_override(local_args_override, 'local cli args')

        # if local_arg_flags & CICFlag.REDIS_CALLBACK:
        #     config.add(getattr(args, 'redis_host_callback'), '_REDIS_HOST_CALLBACK')
        #     config.add(getattr(args, 'redis_port_callback'), '_REDIS_PORT_CALLBACK')

        if local_arg_flags & CICFlag.CELERY:
            config.add(config.true('CELERY_DEBUG'), 'CELERY_DEBUG', exists_ok=True)

        logg.debug('config loaded:\n{}'.format(config))

        return config
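Putting the pieces together, the intended bootstrapping order appears to be parser → args → `Config.from_args`, mirroring the tracker daemon and tests/cli/test_cli_args.py elsewhere in this changeset. A minimal sketch (the argv values and the absence of a base_config_dir are assumptions):

```python
# Sketch of the CLI bootstrapping sequence; argv values are placeholders.
import cic_cache.cli

arg_flags = cic_cache.cli.argflag_std_read
local_arg_flags = cic_cache.cli.argflag_local_sync

argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args(['--offset', '42', '-r', '0xdeadbeef'])

config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
print(config.get('SYNCER_OFFSET'), config.get('CIC_REGISTRY_ADDRESS'))
```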
apps/cic-cache/cic_cache/cli/registry.py (new file, 33 lines)
@@ -0,0 +1,33 @@
# standard imports
import logging

# external imports
from cic_eth_registry import CICRegistry
from cic_eth_registry.lookup.declarator import AddressDeclaratorLookup
from cic_eth_registry.lookup.tokenindex import TokenIndexLookup
from chainlib.eth.constant import ZERO_ADDRESS

logg = logging.getLogger()


def connect_token_registry(self, conn, chain_spec, sender_address=ZERO_ADDRESS):
    registry = CICRegistry(chain_spec, conn)
    token_registry_address = registry.by_name('TokenRegistry', sender_address=sender_address)
    logg.debug('using token registry address {}'.format(token_registry_address))
    lookup = TokenIndexLookup(chain_spec, token_registry_address)
    CICRegistry.add_lookup(lookup)


def connect_declarator(self, conn, chain_spec, trusted_addresses, sender_address=ZERO_ADDRESS):
    registry = CICRegistry(chain_spec, conn)
    declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address)
    logg.debug('using declarator address {}'.format(declarator_address))
    lookup = AddressDeclaratorLookup(chain_spec, declarator_address, trusted_addresses)
    CICRegistry.add_lookup(lookup)


def connect_registry(conn, chain_spec, registry_address, sender_address=ZERO_ADDRESS):
    CICRegistry.address = registry_address
    registry = CICRegistry(chain_spec, conn)
    registry_address = registry.by_name('ContractRegistry', sender_address=sender_address)
    return registry
apps/cic-cache/cic_cache/cli/rpc.py (new file, 43 lines)
@@ -0,0 +1,43 @@
# standard imports
import logging

# external imports
from chainlib.connection import (
        RPCConnection,
        ConnType,
        )
from chainlib.eth.connection import EthUnixSignerConnection
from chainlib.chain import ChainSpec

logg = logging.getLogger(__name__)


class RPC:

    def __init__(self, chain_spec, rpc_provider, signer_provider=None):
        self.chain_spec = chain_spec
        self.rpc_provider = rpc_provider
        self.signer_provider = signer_provider


    def get_default(self):
        return RPCConnection.connect(self.chain_spec, 'default')


    @staticmethod
    def from_config(config):
        chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
        RPCConnection.register_location(config.get('RPC_HTTP_PROVIDER'), chain_spec, 'default')
        if config.get('SIGNER_PROVIDER'):
            RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, tag='signer')
            RPCConnection.register_location(config.get('SIGNER_PROVIDER'), chain_spec, 'signer')
        rpc = RPC(chain_spec, config.get('RPC_HTTP_PROVIDER'), signer_provider=config.get('SIGNER_PROVIDER'))
        logg.info('set up rpc: {}'.format(rpc))
        return rpc


    def __str__(self):
        return 'RPC factory, chain {}, rpc {}, signer {}'.format(self.chain_spec, self.rpc_provider, self.signer_provider)
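A minimal usage sketch for the RPC factory above, along the lines of tests/cli/test_cli_rpc later in this changeset; the concrete provider URL and chain spec values are placeholders:

```python
# Sketch only: provider URL and chain spec are placeholder values.
import cic_cache.cli

config = {
    'CHAIN_SPEC': 'evm:bloxberg:8996',
    'RPC_HTTP_PROVIDER': 'http://localhost:8545',
    'SIGNER_PROVIDER': None,
}
rpc = cic_cache.cli.RPC.from_config(config)  # registers the 'default' RPC location
conn = rpc.get_default()                     # RPCConnection bound to the chain spec
print(rpc)
```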
apps/cic-cache/cic_cache/data/config/celery.ini (new file, 5 lines)
@@ -0,0 +1,5 @@
[celery]
broker_url = redis://localhost:6379
result_url =
queue = cic-cache
debug = 0

apps/cic-cache/cic_cache/data/config/cic.ini (new file, 4 lines)
@@ -0,0 +1,4 @@
[cic]
registry_address =
trust_address =
health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas

apps/cic-cache/cic_cache/data/config/database.ini (new file, 10 lines)
@@ -0,0 +1,10 @@
[database]
engine =
driver =
host =
port =
name = cic-cache
user =
password =
debug = 0
pool_size = 0

apps/cic-cache/cic_cache/data/config/signer.ini (new file, 2 lines)
@@ -0,0 +1,2 @@
[signer]
provider =

apps/cic-cache/cic_cache/data/config/syncer.ini (new file, 4 lines)
@@ -0,0 +1,4 @@
[syncer]
loop_interval = 1
offset = 0
no_history = 0
@@ -13,6 +13,9 @@ def list_transactions_mined(
        session,
        offset,
        limit,
        block_offset,
        block_limit,
        oldest=False,
        ):
    """Executes db query to return all confirmed transactions according to the specified offset and limit.

@@ -23,15 +26,62 @@ def list_transactions_mined(
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """
    s = "SELECT block_number, tx_index FROM tx ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(limit, offset)
    order_by = 'DESC'
    if oldest:
        order_by = 'ASC'

    if block_offset:
        if block_limit:
            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} and block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, limit, offset)
        else:
            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, limit, offset)
    else:
        s = "SELECT block_number, tx_index FROM tx ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, limit, offset)
    r = session.execute(s)
    return r


def list_transactions_mined_with_data(
        session,
        offset,
        limit,
        block_offset,
        block_limit,
        oldest=False,
        ):
    """Executes db query to return all confirmed transactions according to the specified offset and limit.

    :param block_offset: First block to include in search
    :type block_offset: int
    :param block_limit: Last block to include in search
    :type block_limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """
    order_by = 'DESC'
    if oldest:
        order_by = 'ASC'

    if block_offset:
        if block_limit:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, limit, offset)
        else:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, limit, offset)
    else:
        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, limit, offset)


    r = session.execute(s)
    return r


def list_transactions_mined_with_data_index(
        session,
        offset,
        end,
        block_offset,
        block_limit,
        oldest=False,
        ):
    """Executes db query to return all confirmed transactions according to the specified offset and limit.

@@ -42,7 +92,87 @@ def list_transactions_mined_with_data(
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """
    s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number ASC, tx_index ASC".format(offset, end)

    order_by = 'DESC'
    if oldest:
        order_by = 'ASC'

    if block_offset:
        if block_limit:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} and block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, offset, end)
        else:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, offset, end)
    else:
        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, offset, end)

    r = session.execute(s)
    return r


def list_transactions_account_mined_with_data_index(
        session,
        address,
        offset,
        limit,
        block_offset,
        block_limit,
        oldest=False,
        ):
    """Executes db query to return all confirmed transactions according to the specified offset and limit, filtered by address

    :param offset: Offset in data set to return transactions from
    :type offset: int
    :param limit: Max number of transactions to retrieve
    :type limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """

    order_by = 'DESC'
    if oldest:
        order_by = 'ASC'

    if block_offset:
        if block_limit:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
        else:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
    else:
        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)

    r = session.execute(s)
    return r

def list_transactions_account_mined_with_data(
        session,
        address,
        offset,
        limit,
        block_offset,
        block_limit,
        oldest=False,
        ):
    """Executes db query to return all confirmed transactions according to the specified offset and limit.

    :param block_offset: First block to include in search
    :type block_offset: int
    :param block_limit: Last block to include in search
    :type block_limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """

    order_by = 'DESC'
    if oldest:
        order_by = 'ASC'

    if block_offset:
        if block_limit:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
        else:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
    else:
        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)

    r = session.execute(s)
    return r
@@ -53,6 +183,9 @@ def list_transactions_account_mined(
        address,
        offset,
        limit,
        block_offset,
        block_limit,
        oldest=False,
        ):
    """Same as list_transactions_mined(...), but only retrieves transaction where the specified account address is sender or recipient.

@@ -65,7 +198,20 @@ def list_transactions_account_mined(
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """
    s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(address, address, limit, offset)

    order_by = 'DESC'
    if oldest:
        order_by = 'ASC'

    if block_offset:
        if block_limit:
            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
        else:
            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)

    else:
        s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)

    r = session.execute(s)
    return r
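A sketch of how these query helpers are invoked (the concrete offsets, block bounds and address are placeholders; the session setup follows the SessionBase usage shown elsewhere in this changeset):

```python
# Illustration only: calling the extended query helpers above.
from cic_cache.db.models.base import SessionBase
from cic_cache.db.list import (
    list_transactions_mined,
    list_transactions_account_mined,
)

# assumes SessionBase.connect(dsn) has been called during application setup
session = SessionBase.create_session()

# newest-first: first 100 confirmed transactions between blocks 1000 and 2000
rows = list_transactions_mined(session, 0, 100, 1000, 2000)

# oldest-first: transactions for one address, no block bounds
rows = list_transactions_account_mined(session, '0xdeadbeef', 0, 100, None, None, oldest=True)

for block_number, tx_index in rows:
    print(block_number, tx_index)

session.close()
```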
@@ -7,7 +7,7 @@ Create Date: 2021-04-01 08:10:29.156243
"""
from alembic import op
import sqlalchemy as sa
from chainsyncer.db.migrations.sqlalchemy import (
from chainsyncer.db.migrations.default.export import (
    chainsyncer_upgrade,
    chainsyncer_downgrade,
)
@@ -100,3 +100,4 @@ class SessionBase(Model):
        logg.debug('destroying session {}'.format(session_key))
        session.commit()
        session.close()
        del SessionBase.localsessions[session_key]
@@ -91,13 +91,14 @@ def process_transactions_all_data(session, env):
    if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
        return None

    offset = r[1]
    end = r[2]
    logg.debug('got data request {}'.format(env))
    block_offset = r[1]
    block_end = r[2]
    if int(r[2]) < int(r[1]):
        raise ValueError('cart before the horse, dude')

    c = DataCache(session)
    (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, end)
    (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(0, 0, block_offset, block_end, oldest=True) # oldest needs to be settable

    for r in tx_cache:
        r['date_block'] = r['date_block'].timestamp()
@@ -8,15 +8,7 @@ import sys
import re

# external imports
import confini
import celery
import sqlalchemy
import rlp
import cic_base.config
import cic_base.log
import cic_base.argparse
import cic_base.rpc
from cic_base.eth.syncer import chain_interface
from cic_eth_registry import CICRegistry
from cic_eth_registry.error import UnknownContractError
from chainlib.chain import ChainSpec
@@ -34,6 +26,7 @@ from chainsyncer.driver.history import HistorySyncer
from chainsyncer.db.models.base import SessionBase

# local imports
import cic_cache.cli
from cic_cache.db import (
    dsn_from_config,
    add_tag,
@@ -43,32 +36,36 @@ from cic_cache.runnable.daemons.filters import (
    FaucetFilter,
    )

script_dir = os.path.realpath(os.path.dirname(__file__))
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

def add_block_args(argparser):
    argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync')
    argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
    return argparser
# process args
arg_flags = cic_cache.cli.argflag_std_read
local_arg_flags = cic_cache.cli.argflag_local_sync
argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()

# process config
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)

logg = cic_base.log.create()
argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
args = cic_base.argparse.parse(argparser, logg)
config = cic_base.config.create(args.c, args, args.env_prefix)

config.add(args.history_start, 'SYNCER_HISTORY_START', True)
config.add(args.no_history, '_NO_HISTORY', True)

cic_base.config.log(config)

# connect to database
dsn = dsn_from_config(config)

SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))

chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
# set up rpc
rpc = cic_cache.cli.RPC.from_config(config)
conn = rpc.get_default()

cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
# set up chain provisions
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
registry = None
try:
    registry = cic_cache.cli.connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
except UnknownContractError as e:
    logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
    sys.exit(1)
logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))


def register_filter_tags(filters, session):
@@ -95,14 +92,12 @@ def main():

    syncers = []

    #if SQLBackend.first(chain_spec):
    #    backend = SQLBackend.initial(chain_spec, block_offset)
    syncer_backends = SQLBackend.resume(chain_spec, block_offset)

    if len(syncer_backends) == 0:
        initial_block_start = config.get('SYNCER_HISTORY_START')
        initial_block_start = config.get('SYNCER_OFFSET')
        initial_block_offset = block_offset
        if config.get('_NO_HISTORY'):
        if config.get('SYNCER_NO_HISTORY'):
            initial_block_start = block_offset
            initial_block_offset += 1
        syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
@@ -112,10 +107,10 @@ def main():
        logg.info('resuming sync session {}'.format(syncer_backend))

    for syncer_backend in syncer_backends:
        syncers.append(HistorySyncer(syncer_backend, chain_interface))
        syncers.append(HistorySyncer(syncer_backend, cic_cache.cli.chain_interface))

    syncer_backend = SQLBackend.live(chain_spec, block_offset+1)
    syncers.append(HeadSyncer(syncer_backend, chain_interface))
    syncers.append(HeadSyncer(syncer_backend, cic_cache.cli.chain_interface))

    trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
    if trusted_addresses_src == None:
@@ -2,14 +2,17 @@
import celery

# local imports
from cic_cache.cache import BloomCache
from cic_cache.cache import (
        BloomCache,
        DataCache,
        )
from cic_cache.db.models.base import SessionBase

celery_app = celery.current_app


@celery_app.task(bind=True)
def tx_filter(self, offset, limit, address=None, encoding='hex'):
def tx_filter(self, offset, limit, address=None, oldest=False, encoding='hex'):
    queue = self.request.delivery_info.get('routing_key')

    session = SessionBase.create_session()
@@ -17,9 +20,9 @@ def tx_filter(self, offset, limit, address=None, encoding='hex'):
    c = BloomCache(session)
    b = None
    if address == None:
        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit, oldest=oldest)
    else:
        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit, oldest=oldest)

    session.close()

@@ -35,4 +38,17 @@ def tx_filter(self, offset, limit, address=None, encoding='hex'):
    return o


@celery_app.task(bind=True)
def tx_filter_content(self, offset, limit, address=None, block_offset=None, block_limit=None, oldest=False, encoding='hex'):
    session = SessionBase.create_session()

    c = DataCache(session)
    b = None
    if address == None:
        (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
    else:
        (lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data_index(address, offset, limit, block_offset=block_offset, block_limit=block_limit)

    session.close()

    return (lowest_block, highest_block, tx_cache,)
@@ -4,8 +4,8 @@ import semver
version = (
        0,
        2,
        0,
        'alpha.2',
        1,
        'alpha.1',
        )

version_object = semver.VersionInfo(
@@ -1,2 +0,0 @@
[bancor]
dir =
@@ -1,4 +1,3 @@
[cic]
registry_address =
chain_spec =
trust_address =

@@ -1,3 +0,0 @@
[bancor]
registry_address =
dir = /usr/local/share/bancor
@@ -1,4 +1,3 @@
[cic]
chain_spec =
registry_address =
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C

@@ -1,2 +0,0 @@
[eth]
provider = http://localhost:63545
@@ -1,3 +1,4 @@
[syncer]
loop_interval = 1
history_start = 0
offset = 0
no_history = 0

@@ -1,2 +0,0 @@
[eth]
provider = ws://localhost:8545
@@ -1,3 +0,0 @@
[syncer]
loop_interval = 5
history_start = 0
@@ -1,37 +0,0 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev

# RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9

COPY requirements.txt .
#RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
#RUN pip install $pip_extra_index_url_flag .
#RUN pip install .[server]

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
RUN pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
    -r requirements.txt

COPY . .

RUN python setup.py install

# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-cache/

# for db migrations
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/

COPY /docker/start_tracker.sh ./start_tracker.sh
COPY /docker/db.sh ./db.sh
RUN chmod 755 ./*.sh
# Tracker
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
# Server
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
ENTRYPOINT []
@@ -1,13 +1,17 @@
cic-base~=0.2.0a2
alembic==1.4.2
confini>=0.3.6rc3,<0.5.0
confini>=0.3.6rc4,<0.5.0
uwsgi==2.0.19.1
moolb~=0.1.0
cic-eth-registry~=0.5.6a2
moolb~=0.1.1b2
cic-eth-registry~=0.5.8a1
SQLAlchemy==1.3.20
semver==2.13.0
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
chainsyncer[sql]~=0.0.3a4
erc20-faucet~=0.2.2a2
chainsyncer[sql]>=0.0.6a1,<0.1.0
erc20-faucet>=0.2.4a2, <0.3.0
#chainlib-eth==0.0.7a5,<0.1.0
chainlib-eth==0.0.7a5
#chainlib==0.0.7a4,<0.1.0
chainlib==0.0.7a4
eth-address-index>=0.1.4a1,<0.2.0
@@ -23,11 +23,13 @@ licence_files =

[options]
python_requires = >= 3.6
include_package_data = True
packages =
	cic_cache
	cic_cache.tasks
	cic_cache.db
	cic_cache.db.models
	cic_cache.cli
	cic_cache.runnable
	cic_cache.runnable.daemons
	cic_cache.runnable.daemons.filters
@@ -39,3 +41,4 @@ console_scripts =
	cic-cache-trackerd = cic_cache.runnable.daemons.tracker:main
	cic-cache-serverd = cic_cache.runnable.daemons.server:main
	cic-cache-taskerd = cic_cache.runnable.daemons.tasker:main
	cic-cache-list = cic_cache.runable.list:main
@@ -6,5 +6,5 @@ sqlparse==0.4.1
pytest-celery==0.0.0a1
eth_tester==0.5.0b3
py-evm==0.3.0a20
cic_base[full]==0.1.3a3+build.984b5cff
sarafu-faucet~=0.0.4a1
sarafu-faucet~=0.0.5a2
erc20-transfer-authorization>=0.3.4a1,<0.4.0
apps/cic-cache/tests/cli/test_cli_args.py (new file, 40 lines)
@@ -0,0 +1,40 @@
# standard imports
import os

# external imports
import chainlib.cli

# local imports
import cic_cache.cli

script_dir = os.path.dirname(os.path.realpath(__file__))
config_dir = os.path.join(script_dir, '..', 'testdata', 'config')


def test_argumentparserto_config():

    argparser = cic_cache.cli.ArgumentParser()

    local_flags = 0xffff
    argparser.process_local_flags(local_flags)
    argparser.add_argument('--foo', type=str)
    args = argparser.parse_args([
        '-q', 'baz',
        '--offset', '13',
        '--no-history',
        '-r','0xdeadbeef',
        '-vv',
        '--foo', 'bar',
        ])

    extra_args = {
        'foo': '_BARBARBAR',
        }
    config = cic_cache.cli.Config.from_args(args, chainlib.cli.argflag_std_base, local_flags, extra_args=extra_args, base_config_dir=config_dir)

    assert config.get('_BARBARBAR') == 'bar'
    assert config.get('CELERY_QUEUE') == 'baz'
    assert config.get('SYNCER_NO_HISTORY') == True
    assert config.get('SYNCER_OFFSET') == 13
    assert config.get('CIC_REGISTRY_ADDRESS') == '0xdeadbeef'
17
apps/cic-cache/tests/cli/test_cli_celery.py
Normal file
@@ -0,0 +1,17 @@
# standard imports
import tempfile

# local imports
import cic_cache.cli


def test_cli_celery():
    cf = tempfile.mkdtemp()

    config = {
        'CELERY_RESULT_URL': 'filesystem://' + cf,
        }
    cic_cache.cli.CeleryApp.from_config(config)

    config['CELERY_BROKER_URL'] = 'filesystem://' + cf
    cic_cache.cli.CeleryApp.from_config(config)
68
apps/cic-cache/tests/cli/test_cli_chain.py
Normal file
@@ -0,0 +1,68 @@
# standard imports
import os

# external imports
import pytest
from chainlib.eth.gas import (
    Gas,
    RPCGasOracle,
    )
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.block import (
    block_latest,
    Block,
    )
from chainlib.eth.pytest.fixtures_chain import default_chain_spec
from chainlib.eth.pytest.fixtures_ethtester import *
from cic_eth_registry.pytest.fixtures_contracts import *
from hexathon import add_0x

# local imports
import cic_cache.cli


@pytest.mark.xfail()
def test_cli_rpc(
        eth_rpc,
        eth_signer,
        default_chain_spec,
        ):
    config = {
        'CHAIN_SPEC': str(default_chain_spec),
        'RPC_HTTP_PROVIDER': 'http://localhost:8545',
        }
    rpc = cic_cache.cli.RPC.from_config(config, default_label='foo')
    conn = rpc.get_by_label('foo')
    #o = block_latest()
    #conn.do(o)


def test_cli_chain(
        default_chain_spec,
        eth_rpc,
        eth_signer,
        contract_roles,
        ):
    ifc = cic_cache.cli.EthChainInterface()

    nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
    gas_oracle = RPCGasOracle(conn=eth_rpc)
    c = Gas(default_chain_spec, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, signer=eth_signer)
    recipient = add_0x(os.urandom(20).hex())
    (tx_hash, o) = c.create(contract_roles['CONTRACT_DEPLOYER'], recipient, 1024)
    r = eth_rpc.do(o)

    o = ifc.tx_receipt(r)
    r = eth_rpc.do(o)
    assert r['status'] == 1

    o = ifc.block_by_number(1)
    block_src = eth_rpc.do(o)
    block = ifc.block_from_src(block_src)
    assert block.number == 1

    with pytest.raises(KeyError):
        assert block_src['gasUsed'] == 21000
        assert block_src['gas_used'] == 21000

    block_src = ifc.src_normalize(block_src)
    assert block_src['gasUsed'] == 21000
    assert block_src['gas_used'] == 21000
@@ -64,7 +64,6 @@ def txs(
|
||||
dt.timestamp(),
|
||||
)
|
||||
|
||||
|
||||
tx_number = 42
|
||||
tx_hash_second = '0x' + os.urandom(32).hex()
|
||||
tx_signed_second = '0x' + os.urandom(128).hex()
|
||||
@@ -93,6 +92,44 @@ def txs(
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def more_txs(
|
||||
init_database,
|
||||
list_defaults,
|
||||
list_actors,
|
||||
list_tokens,
|
||||
txs,
|
||||
):
|
||||
|
||||
session = init_database
|
||||
|
||||
tx_number = 666
|
||||
tx_hash = '0x' + os.urandom(32).hex()
|
||||
tx_signed = '0x' + os.urandom(128).hex()
|
||||
nonce = 3
|
||||
|
||||
dt = datetime.datetime.utcnow()
|
||||
dt += datetime.timedelta(hours=1)
|
||||
db.add_transaction(
|
||||
session,
|
||||
tx_hash,
|
||||
list_defaults['block']+2,
|
||||
tx_number,
|
||||
list_actors['alice'],
|
||||
list_actors['diane'],
|
||||
list_tokens['bar'],
|
||||
list_tokens['bar'],
|
||||
2048,
|
||||
4096,
|
||||
False,
|
||||
dt.timestamp(),
|
||||
)
|
||||
|
||||
session.commit()
|
||||
|
||||
return [tx_hash] + txs
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def tag_txs(
|
||||
init_database,
|
||||
|
||||
@@ -8,6 +8,7 @@ import json
|
||||
import pytest
|
||||
|
||||
# local imports
|
||||
from cic_cache import db
|
||||
from cic_cache import BloomCache
|
||||
from cic_cache.cache import DataCache
|
||||
|
||||
@@ -18,7 +19,6 @@ def test_cache(
|
||||
init_database,
|
||||
list_defaults,
|
||||
list_actors,
|
||||
list_tokens,
|
||||
txs,
|
||||
):
|
||||
|
||||
@@ -37,9 +37,6 @@ def test_cache(
|
||||
|
||||
def test_cache_data(
|
||||
init_database,
|
||||
list_defaults,
|
||||
list_actors,
|
||||
list_tokens,
|
||||
txs,
|
||||
tag_txs,
|
||||
):
|
||||
@@ -47,10 +44,209 @@ def test_cache_data(
|
||||
session = init_database
|
||||
|
||||
c = DataCache(session)
|
||||
b = c.load_transactions_with_data(410000, 420000)
|
||||
b = c.load_transactions_with_data(0, 3) #410000, 420000) #, 100, block_offset=410000, block_limit=420000, oldest=True)
|
||||
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == txs[1]
|
||||
assert b[2][1]['tx_type'] == 'unknown'
|
||||
assert b[2][0]['tx_type'] == 'test.taag'
|
||||
assert b[2][0]['tx_hash'] == txs[0]
|
||||
assert b[2][0]['tx_type'] == 'unknown'
|
||||
assert b[2][1]['tx_type'] == 'test.taag'
|
||||
|
||||
|
||||
def test_cache_ranges(
|
||||
init_database,
|
||||
list_defaults,
|
||||
list_actors,
|
||||
list_tokens,
|
||||
more_txs,
|
||||
):
|
||||
|
||||
session = init_database
|
||||
|
||||
oldest = list_defaults['block'] - 1
|
||||
mid = list_defaults['block']
|
||||
newest = list_defaults['block'] + 2
|
||||
|
||||
c = BloomCache(session)
|
||||
b = c.load_transactions(0, 100)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions(1, 2)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == mid
|
||||
|
||||
b = c.load_transactions(0, 2)
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions(0, 1)
|
||||
assert b[0] == newest
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions(0, 100, oldest=True)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions(0, 100, block_offset=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
assert b[0] == oldest
|
||||
assert b[1] == mid
|
||||
|
||||
b = c.load_transactions(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'], oldest=True)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == mid
|
||||
|
||||
# now check when supplying account
|
||||
b = c.load_transactions_account(list_actors['alice'], 0, 100)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions_account(list_actors['bob'], 0, 100)
|
||||
assert b[0] == mid
|
||||
assert b[1] == mid
|
||||
|
||||
b = c.load_transactions_account(list_actors['diane'], 0, 100)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
|
||||
# add block filter to the mix
|
||||
b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
|
||||
b = c.load_transactions_account(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == mid
|
||||
|
||||
b = c.load_transactions_account(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
assert b[0] == oldest
|
||||
assert b[1] == oldest
|
||||
|
||||
|
||||
def test_cache_ranges_data(
|
||||
init_database,
|
||||
list_defaults,
|
||||
list_actors,
|
||||
list_tokens,
|
||||
more_txs,
|
||||
):
|
||||
|
||||
session = init_database
|
||||
|
||||
oldest = list_defaults['block'] - 1
|
||||
mid = list_defaults['block']
|
||||
newest = list_defaults['block'] + 2
|
||||
|
||||
c = DataCache(session)
|
||||
|
||||
b = c.load_transactions_with_data(0, 100)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 3
|
||||
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||
assert b[2][2]['tx_hash'] == more_txs[2]
|
||||
|
||||
b = c.load_transactions_with_data(1, 2)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == mid
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == more_txs[1]
|
||||
assert b[2][1]['tx_hash'] == more_txs[2]
|
||||
|
||||
b = c.load_transactions_with_data(0, 2)
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
|
||||
b = c.load_transactions_with_data(0, 1)
|
||||
assert b[0] == newest
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 1
|
||||
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||
|
||||
b = c.load_transactions_with_data(0, 100, oldest=True)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 3
|
||||
assert b[2][0]['tx_hash'] == more_txs[2]
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
assert b[2][2]['tx_hash'] == more_txs[0]
|
||||
|
||||
b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
|
||||
b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
assert b[0] == oldest
|
||||
assert b[1] == mid
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == more_txs[1]
|
||||
assert b[2][1]['tx_hash'] == more_txs[2]
|
||||
|
||||
b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'], oldest=True)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == mid
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == more_txs[2]
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
|
||||
# now check when supplying account
|
||||
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 3
|
||||
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
assert b[2][2]['tx_hash'] == more_txs[2]
|
||||
|
||||
b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100)
|
||||
assert b[0] == mid
|
||||
assert b[1] == mid
|
||||
assert len(b[2]) == 1
|
||||
assert b[2][0]['tx_hash'] == more_txs[1]
|
||||
|
||||
b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100)
|
||||
assert b[0] == oldest
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||
assert b[2][1]['tx_hash'] == more_txs[2]
|
||||
|
||||
# add block filter to the mix
|
||||
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
|
||||
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == newest
|
||||
assert len(b[2]) == 2
|
||||
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||
|
||||
b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
assert b[0] == mid
|
||||
assert b[1] == mid
|
||||
assert len(b[2]) == 1
|
||||
assert b[2][0]['tx_hash'] == more_txs[1]
|
||||
|
||||
b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||
assert b[0] == oldest
|
||||
assert b[1] == oldest
|
||||
assert len(b[2]) == 1
|
||||
assert b[2][0]['tx_hash'] == more_txs[2]
|
||||
|
||||
2
apps/cic-cache/tests/testdata/config/test.ini
vendored
Normal file
@@ -0,0 +1,2 @@
[foo]
bar_baz = xyzzy
@@ -46,7 +46,7 @@ def get_adjusted_balance(self, token_symbol, amount, timestamp):
|
||||
|
||||
|
||||
def aux_setup(rpc, config, sender_address=ZERO_ADDRESS):
|
||||
chain_spec_str = config.get('CIC_CHAIN_SPEC')
|
||||
chain_spec_str = config.get('CHAIN_SPEC')
|
||||
chain_spec = ChainSpec.from_chain_str(chain_spec_str)
|
||||
token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
celery==4.4.7
|
||||
erc20-demurrage-token~=0.0.2a3
|
||||
cic-eth-registry~=0.5.6a1
|
||||
chainlib~=0.0.5a1
|
||||
cic_eth~=0.12.0a2
|
||||
erc20-demurrage-token~=0.0.3a1
|
||||
cic-eth-registry~=0.5.8a1
|
||||
chainlib~=0.0.7a1
|
||||
cic_eth~=0.12.2a4
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[metadata]
|
||||
name = cic-eth-aux-erc20-demurrage-token
|
||||
version = 0.0.2a4
|
||||
version = 0.0.2a6
|
||||
description = cic-eth tasks supporting erc20 demurrage token
|
||||
author = Louis Holbrook
|
||||
author_email = dev@holbrook.no
|
||||
|
||||
@@ -5,8 +5,7 @@ pytest-cov==2.10.1
|
||||
eth-tester==0.5.0b3
|
||||
py-evm==0.3.0a20
|
||||
SQLAlchemy==1.3.20
|
||||
cic-eth~=0.12.0a1
|
||||
liveness~=0.0.1a7
|
||||
eth-accounts-index==0.0.12a1
|
||||
eth-contract-registry==0.5.6a1
|
||||
eth-address-index==0.1.2a1
|
||||
eth-accounts-index==0.1.1a1
|
||||
eth-contract-registry==0.5.8a1
|
||||
eth-address-index==0.2.1a1
|
||||
|
||||
@@ -6,4 +6,5 @@ omit =
|
||||
cic_eth/sync/head.py
|
||||
cic_eth/sync/mempool.py
|
||||
cic_eth/queue/state.py
|
||||
cic_eth/cli
|
||||
*redis*.py
|
||||
|
||||
@@ -1,30 +1,30 @@
|
||||
.cic_eth_variables:
|
||||
variables:
|
||||
APP_NAME: cic-eth
|
||||
DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||
CONTEXT: apps/$APP_NAME
|
||||
|
||||
build-mr-cic-eth:
|
||||
extends:
|
||||
- .cic_eth_variables
|
||||
- .py_build_target_dev
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/cic-eth/**/*
|
||||
when: always
|
||||
#build-mr-cic-eth:
|
||||
# extends:
|
||||
# - .cic_eth_variables
|
||||
# - .py_build_target_dev
|
||||
# rules:
|
||||
# - if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
# changes:
|
||||
# - apps/cic-eth/**/*
|
||||
# when: always
|
||||
|
||||
test-mr-cic-eth:
|
||||
stage: test
|
||||
tags:
|
||||
- integration
|
||||
extends:
|
||||
- .cic_eth_variables
|
||||
cache:
|
||||
key:
|
||||
files:
|
||||
- test_requirements.txt
|
||||
paths:
|
||||
- /root/.cache/pip
|
||||
image: $MR_IMAGE_TAG
|
||||
key:
|
||||
files:
|
||||
- test_requirements.txt
|
||||
paths:
|
||||
- /root/.cache/pip
|
||||
image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/$APP_NAME:$MR_IMAGE_TAG
|
||||
script:
|
||||
- cd apps/$APP_NAME/
|
||||
- >
|
||||
@@ -34,19 +34,20 @@ test-mr-cic-eth:
|
||||
-r services_requirements.txt
|
||||
-r test_requirements.txt
|
||||
- export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
|
||||
needs: ["build-mr-cic-eth"]
|
||||
allow_failure: true
|
||||
needs: ["build-merge-request"]
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/cic-eth/**/*
|
||||
- apps/$APP_NAME/**/*
|
||||
when: always
|
||||
|
||||
build-push-cic-eth:
|
||||
extends:
|
||||
- .py_build_push
|
||||
- .cic_eth_variables
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master"
|
||||
changes:
|
||||
- apps/cic-eth/**/*
|
||||
when: always
|
||||
#build-push-cic-eth:
|
||||
# extends:
|
||||
# - .py_build_push
|
||||
# - .cic_eth_variables
|
||||
# rules:
|
||||
# - if: $CI_COMMIT_BRANCH == "master"
|
||||
# changes:
|
||||
# - apps/cic-eth/**/*
|
||||
# when: always
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
include *requirements.txt config/test/*
|
||||
include *requirements.txt config/test/* cic_eth/data/config/*
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
SQLAlchemy==1.3.20
|
||||
cic-eth-registry~=0.5.6a2
|
||||
cic-eth-registry>=0.5.6a2,<0.6.0
|
||||
hexathon~=0.0.1a7
|
||||
chainqueue~=0.0.2b6
|
||||
eth-erc20~=0.0.10a3
|
||||
chainqueue>=0.0.3a1,<0.1.0
|
||||
eth-erc20>=0.0.10a3,<0.1.0
|
||||
|
||||
@@ -6,6 +6,11 @@ import logging
|
||||
import celery
|
||||
from chainlib.eth.constant import ZERO_ADDRESS
|
||||
from chainlib.chain import ChainSpec
|
||||
from hexathon import (
|
||||
add_0x,
|
||||
strip_0x,
|
||||
uniform as hex_uniform,
|
||||
)
|
||||
|
||||
# local imports
|
||||
from cic_eth.db.enum import LockEnum
|
||||
@@ -19,6 +24,12 @@ from cic_eth.error import LockedError
|
||||
celery_app = celery.current_app
|
||||
logg = logging.getLogger()
|
||||
|
||||
|
||||
def normalize_address(a):
|
||||
if a == None:
|
||||
return None
|
||||
return add_0x(hex_uniform(strip_0x(a)))
|
||||
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL, tx_hash=None):
|
||||
"""Task wrapper to set arbitrary locks
|
||||
@@ -32,6 +43,7 @@ def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.AL
|
||||
:returns: New lock state for address
|
||||
:rtype: number
|
||||
"""
|
||||
address = normalize_address(address)
|
||||
chain_str = '::'
|
||||
if chain_spec_dict != None:
|
||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||
@@ -53,6 +65,7 @@ def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.
|
||||
:returns: New lock state for address
|
||||
:rtype: number
|
||||
"""
|
||||
address = normalize_address(address)
|
||||
chain_str = '::'
|
||||
if chain_spec_dict != None:
|
||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||
@@ -72,6 +85,7 @@ def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None
|
||||
:returns: New lock state for address
|
||||
:rtype: number
|
||||
"""
|
||||
address = normalize_address(address)
|
||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||
r = Lock.set(chain_str, LockEnum.SEND, address=address, tx_hash=tx_hash)
|
||||
logg.debug('Send locked for {}, flag now {}'.format(address, r))
|
||||
@@ -89,6 +103,7 @@ def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
|
||||
:returns: New lock state for address
|
||||
:rtype: number
|
||||
"""
|
||||
address = normalize_address(address)
|
||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||
r = Lock.reset(chain_str, LockEnum.SEND, address=address)
|
||||
logg.debug('Send unlocked for {}, flag now {}'.format(address, r))
|
||||
@@ -106,6 +121,7 @@ def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=Non
|
||||
:returns: New lock state for address
|
||||
:rtype: number
|
||||
"""
|
||||
address = normalize_address(address)
|
||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||
r = Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
|
||||
logg.debug('Queue direct locked for {}, flag now {}'.format(address, r))
|
||||
@@ -123,6 +139,7 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
|
||||
:returns: New lock state for address
|
||||
:rtype: number
|
||||
"""
|
||||
address = normalize_address(address)
|
||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||
r = Lock.reset(chain_str, LockEnum.QUEUE, address=address)
|
||||
logg.debug('Queue direct unlocked for {}, flag now {}'.format(address, r))
|
||||
@@ -131,6 +148,7 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
|
||||
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
|
||||
address = normalize_address(address)
|
||||
chain_str = '::'
|
||||
if chain_spec_dict != None:
|
||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||
|
||||
@@ -14,7 +14,11 @@ from chainqueue.sql.query import get_tx
|
||||
from chainqueue.sql.state import set_cancel
|
||||
from chainqueue.db.models.otx import Otx
|
||||
from chainqueue.db.models.tx import TxCache
|
||||
from hexathon import strip_0x
|
||||
from hexathon import (
|
||||
strip_0x,
|
||||
add_0x,
|
||||
uniform as hex_uniform,
|
||||
)
|
||||
from potaahto.symbols import snake_and_camel
|
||||
|
||||
# local imports
|
||||
@@ -69,15 +73,17 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
|
||||
|
||||
set_cancel(chain_spec, strip_0x(tx['hash']), manual=True, session=session)
|
||||
|
||||
query_address = add_0x(hex_uniform(strip_0x(address))) # aaaaargh
|
||||
q = session.query(Otx)
|
||||
q = q.join(TxCache)
|
||||
q = q.filter(TxCache.sender==address)
|
||||
q = q.filter(TxCache.sender==query_address)
|
||||
q = q.filter(Otx.nonce>=nonce+delta)
|
||||
q = q.order_by(Otx.nonce.asc())
|
||||
otxs = q.all()
|
||||
|
||||
tx_hashes = []
|
||||
txs = []
|
||||
gas_total = 0
|
||||
for otx in otxs:
|
||||
tx_raw = bytes.fromhex(strip_0x(otx.signed_tx))
|
||||
tx_new = unpack(tx_raw, chain_spec)
|
||||
@@ -89,8 +95,10 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
|
||||
tx_new['gas_price'] += 1
|
||||
tx_new['gasPrice'] = tx_new['gas_price']
|
||||
tx_new['nonce'] -= delta
|
||||
gas_total += tx_new['gas_price'] * tx_new['gas']
|
||||
|
||||
logg.debug('tx_new {}'.format(tx_new))
|
||||
logg.debug('gas running total {}'.format(gas_total))
|
||||
|
||||
del(tx_new['hash'])
|
||||
del(tx_new['hash_unsigned'])
|
||||
@@ -122,8 +130,10 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
|
||||
s = create_check_gas_task(
|
||||
txs,
|
||||
chain_spec,
|
||||
tx_new['from'],
|
||||
gas=tx_new['gas'],
|
||||
#tx_new['from'],
|
||||
address,
|
||||
#gas=tx_new['gas'],
|
||||
gas=gas_total,
|
||||
tx_hashes_hex=tx_hashes,
|
||||
queue=queue,
|
||||
)
|
||||
@@ -132,7 +142,8 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
|
||||
'cic_eth.admin.ctrl.unlock_send',
|
||||
[
|
||||
chain_spec.asdict(),
|
||||
tx_new['from'],
|
||||
address,
|
||||
#tx_new['from'],
|
||||
],
|
||||
queue=queue,
|
||||
)
|
||||
@@ -140,7 +151,8 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
|
||||
'cic_eth.admin.ctrl.unlock_queue',
|
||||
[
|
||||
chain_spec.asdict(),
|
||||
tx_new['from'],
|
||||
address,
|
||||
#tx_new['from'],
|
||||
],
|
||||
queue=queue,
|
||||
)
|
||||
|
||||
@@ -21,6 +21,7 @@ from chainlib.hash import keccak256_hex_to_hex
|
||||
from hexathon import (
|
||||
strip_0x,
|
||||
add_0x,
|
||||
uniform as hex_uniform,
|
||||
)
|
||||
from chainlib.eth.gas import balance
|
||||
from chainqueue.db.enum import (
|
||||
@@ -307,6 +308,8 @@ class AdminApi:
|
||||
:param address: Ethereum address to return transactions for
|
||||
:type address: str, 0x-hex
|
||||
"""
|
||||
|
||||
address = add_0x(hex_uniform(strip_0x(address)))
|
||||
last_nonce = -1
|
||||
s = celery.signature(
|
||||
'cic_eth.queue.query.get_account_tx',
|
||||
|
||||
@@ -520,9 +520,9 @@ class Api(ApiBase):
|
||||
s_external_get = celery.signature(
|
||||
external_task,
|
||||
[
|
||||
address,
|
||||
offset,
|
||||
limit,
|
||||
address,
|
||||
],
|
||||
queue=external_queue,
|
||||
)
|
||||
|
||||
@@ -21,7 +21,7 @@ def health(*args, **kwargs):
|
||||
session = SessionBase.create_session()
|
||||
|
||||
config = kwargs['config']
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
|
||||
logg.debug('check gas balance of gas gifter for chain {}'.format(chain_spec))
|
||||
|
||||
try:
|
||||
|
||||
@@ -15,7 +15,7 @@ logg = logging.getLogger().getChild(__name__)
|
||||
def health(*args, **kwargs):
|
||||
blocked = True
|
||||
max_attempts = 5
|
||||
conn = RPCConnection.connect(kwargs['config'].get('CIC_CHAIN_SPEC'), tag='signer')
|
||||
conn = RPCConnection.connect(kwargs['config'].get('CHAIN_SPEC'), tag='signer')
|
||||
for i in range(max_attempts):
|
||||
idx = i + 1
|
||||
logg.debug('attempt signer connection check {}/{}'.format(idx, max_attempts))
|
||||
|
||||
10
apps/cic-eth/cic_eth/cli/__init__.py
Normal file
@@ -0,0 +1,10 @@
# local imports
from .base import *
from .chain import (
    EthChainInterface,
    chain_interface,
    )
from .rpc import RPC
from .arg import ArgumentParser
from .config import Config
from .celery import CeleryApp
31
apps/cic-eth/cic_eth/cli/arg.py
Normal file
@@ -0,0 +1,31 @@
# external imports
from chainlib.eth.cli import ArgumentParser as BaseArgumentParser

# local imports
from .base import (
    CICFlag,
    Flag,
    )


class ArgumentParser(BaseArgumentParser):

    def process_local_flags(self, local_arg_flags):
        if local_arg_flags & CICFlag.REDIS:
            self.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
            self.add_argument('--redis-port', dest='redis_port', type=int, help='redis port to use for task submission')
            self.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use')
        if local_arg_flags & CICFlag.REDIS_CALLBACK:
            self.add_argument('--redis-host-callback', dest='redis_host_callback', default='localhost', type=str, help='redis host to use for callback')
            self.add_argument('--redis-port-callback', dest='redis_port_callback', default=6379, type=int, help='redis port to use for callback')
            self.add_argument('--redis-timeout', default=20.0, type=float, help='Redis callback timeout')
        if local_arg_flags & CICFlag.CELERY:
            self.add_argument('-q', '--celery-queue', dest='celery_queue', type=str, default='cic-eth', help='Task queue')
        if local_arg_flags & CICFlag.SYNCER:
            self.add_argument('--offset', type=int, default=0, help='Start block height for initial history sync')
            self.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
        if local_arg_flags & CICFlag.CHAIN:
            self.add_argument('-r', '--registry-address', type=str, dest='registry_address', help='CIC registry contract address')
31
apps/cic-eth/cic_eth/cli/base.py
Normal file
@@ -0,0 +1,31 @@
# standard imports
import enum

# external imports
from chainlib.eth.cli import (
    argflag_std_read,
    argflag_std_write,
    argflag_std_base,
    Flag,
    )


class CICFlag(enum.IntEnum):

    # celery - nibble 1
    CELERY = 1

    # redis - nibble 2
    REDIS = 16
    REDIS_CALLBACK = 32

    # chain - nibble 3
    CHAIN = 256

    # sync - nibble 4
    SYNCER = 4096


argflag_local_task = CICFlag.CELERY
argflag_local_taskcallback = argflag_local_task | CICFlag.REDIS | CICFlag.REDIS_CALLBACK
argflag_local_chain = CICFlag.CHAIN
argflag_local_sync = CICFlag.SYNCER | CICFlag.CHAIN
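The flag values above are laid out one concern per nibble, so they never collide with each other and can be OR-ed into a single bit set next to chainlib's own argument flags. A small sketch of how a daemon entry point might compose them, using only names defined in this file plus the argflag_std_base import shown above:

# sketch: compose local cic-eth flags for a tracker-style daemon and test
# individual capabilities before wiring up the argument parser
from chainlib.eth.cli import argflag_std_base

from cic_eth.cli.base import (
    CICFlag,
    argflag_local_taskcallback,
    argflag_local_sync,
)

local_arg_flags = argflag_local_taskcallback | argflag_local_sync

if local_arg_flags & CICFlag.SYNCER:
    print('parser will get --offset / --no-history')
if local_arg_flags & CICFlag.REDIS_CALLBACK:
    print('parser will get --redis-host-callback / --redis-port-callback')

# chainlib's standard flags travel separately and are passed to Config.from_args
arg_flags = argflag_std_base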
24
apps/cic-eth/cic_eth/cli/celery.py
Normal file
@@ -0,0 +1,24 @@
# standard imports
import logging

# external imports
import celery

logg = logging.getLogger(__name__)


class CeleryApp:

    @classmethod
    def from_config(cls, config):
        backend_url = config.get('CELERY_RESULT_URL')
        broker_url = config.get('CELERY_BROKER_URL')
        celery_app = None
        if backend_url != None:
            celery_app = celery.Celery(broker=broker_url, backend=backend_url)
            logg.info('creating celery app on {} with backend on {}'.format(broker_url, backend_url))
        else:
            celery_app = celery.Celery(broker=broker_url)
            logg.info('creating celery app without results backend on {}'.format(broker_url))

        return celery_app
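A short usage sketch of the factory, pointing it at a throwaway filesystem broker in the same way the cic-cache CLI test earlier in this diff does; the filesystem transport is only suitable for local testing and the directory here is an illustrative assumption, not a production setting.

# sketch: build a Celery app with and without a results backend
import tempfile

from cic_eth.cli.celery import CeleryApp

broker_dir = tempfile.mkdtemp()

config = {
    'CELERY_BROKER_URL': 'filesystem://' + broker_dir,
    'CELERY_RESULT_URL': None,  # falls through to the no-backend branch
    }
celery_app = CeleryApp.from_config(config)

config['CELERY_RESULT_URL'] = 'filesystem://' + broker_dir
celery_app = CeleryApp.from_config(config)  # now with a results backend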
21
apps/cic-eth/cic_eth/cli/chain.py
Normal file
@@ -0,0 +1,21 @@
# external imports
from chainlib.eth.block import (
    block_by_number,
    Block,
    )
from chainlib.eth.tx import (
    receipt,
    Tx,
    )
from chainlib.interface import ChainInterface


class EthChainInterface(ChainInterface):

    def __init__(self):
        self._tx_receipt = receipt
        self._block_by_number = block_by_number
        self._block_from_src = Block.from_src
        self._src_normalize = Tx.src_normalize

chain_interface = EthChainInterface()
63
apps/cic-eth/cic_eth/cli/config.py
Normal file
@@ -0,0 +1,63 @@
# standard imports
import os
import logging

# external imports
from chainlib.eth.cli import (
    Config as BaseConfig,
    Flag,
    )

# local imports
from .base import CICFlag

script_dir = os.path.dirname(os.path.realpath(__file__))

logg = logging.getLogger(__name__)


class Config(BaseConfig):

    local_base_config_dir = os.path.join(script_dir, '..', 'data', 'config')

    @classmethod
    def from_args(cls, args, arg_flags, local_arg_flags, extra_args={}, default_config_dir=None, base_config_dir=None, default_fee_limit=None):
        expanded_base_config_dir = [cls.local_base_config_dir]
        if base_config_dir != None:
            if isinstance(base_config_dir, str):
                base_config_dir = [base_config_dir]
            for d in base_config_dir:
                expanded_base_config_dir.append(d)
        config = BaseConfig.from_args(args, arg_flags, extra_args=extra_args, default_config_dir=default_config_dir, base_config_dir=expanded_base_config_dir, load_callback=None)

        local_args_override = {}
        if local_arg_flags & CICFlag.REDIS:
            local_args_override['REDIS_HOST'] = getattr(args, 'redis_host')
            local_args_override['REDIS_PORT'] = getattr(args, 'redis_port')
            local_args_override['REDIS_DB'] = getattr(args, 'redis_db')
            local_args_override['REDIS_TIMEOUT'] = getattr(args, 'redis_timeout')

        if local_arg_flags & CICFlag.CHAIN:
            local_args_override['CIC_REGISTRY_ADDRESS'] = getattr(args, 'registry_address')

        if local_arg_flags & CICFlag.CELERY:
            local_args_override['CELERY_QUEUE'] = getattr(args, 'celery_queue')

        if local_arg_flags & CICFlag.SYNCER:
            local_args_override['SYNCER_OFFSET'] = getattr(args, 'offset')
            local_args_override['SYNCER_NO_HISTORY'] = getattr(args, 'no_history')

        config.dict_override(local_args_override, 'local cli args')

        if local_arg_flags & CICFlag.REDIS_CALLBACK:
            config.add(getattr(args, 'redis_host_callback'), '_REDIS_HOST_CALLBACK')
            config.add(getattr(args, 'redis_port_callback'), '_REDIS_PORT_CALLBACK')

        if local_arg_flags & CICFlag.CELERY:
            config.add(config.true('CELERY_DEBUG'), 'CELERY_DEBUG', exists_ok=True)

        logg.debug('config loaded:\n{}'.format(config))

        return config
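This mirrors the cic-cache CLI test added earlier in this diff, but for cic-eth: parsed arguments are folded over the bundled data/config ini defaults, and the redis callback values are stashed under underscore-prefixed keys. A hedged sketch of the call, using only flag and argument names defined in this module; the queue and host values are placeholders.

# sketch: parse daemon arguments and fold them into a Config instance
import chainlib.eth.cli

import cic_eth.cli

local_arg_flags = cic_eth.cli.argflag_local_taskcallback

argparser = cic_eth.cli.ArgumentParser()
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args([
    '-q', 'cic-eth',
    '--redis-host-callback', 'redis',
    ])

config = cic_eth.cli.Config.from_args(args, chainlib.eth.cli.argflag_std_base, local_arg_flags)

assert config.get('CELERY_QUEUE') == 'cic-eth'
assert config.get('_REDIS_HOST_CALLBACK') == 'redis'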
90
apps/cic-eth/cic_eth/cli/rpc.py
Normal file
@@ -0,0 +1,90 @@
# standard imports
import logging

# external imports
from chainlib.connection import (
    RPCConnection,
    ConnType,
    )
from chainlib.eth.connection import (
    EthUnixSignerConnection,
    EthHTTPSignerConnection,
    )
from chainlib.chain import ChainSpec

logg = logging.getLogger(__name__)


class RPC:

    def __init__(self, chain_spec, rpc_provider, signer_provider=None):
        self.chain_spec = chain_spec
        self.rpc_provider = rpc_provider
        self.signer_provider = signer_provider


    def get_default(self):
        return self.get_by_label('default')


    def get_by_label(self, label):
        return RPCConnection.connect(self.chain_spec, label)


    @staticmethod
    def from_config(config, use_signer=False, default_label='default', signer_label='signer'):
        chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))

        RPCConnection.register_location(config.get('RPC_HTTP_PROVIDER'), chain_spec, default_label)
        if use_signer:
            RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, signer_label)
            RPCConnection.register_constructor(ConnType.HTTP, EthHTTPSignerConnection, signer_label)
            RPCConnection.register_constructor(ConnType.HTTP_SSL, EthHTTPSignerConnection, signer_label)
            RPCConnection.register_location(config.get('SIGNER_PROVIDER'), chain_spec, signer_label)
        rpc = RPC(chain_spec, config.get('RPC_HTTP_PROVIDER'), signer_provider=config.get('SIGNER_PROVIDER'))
        logg.info('set up rpc: {}'.format(rpc))
        return rpc


    def __str__(self):
        return 'RPC factory, chain {}, rpc {}, signer {}'.format(self.chain_spec, self.rpc_provider, self.signer_provider)


# TODO: re-implement file backend option from omitted code:
#broker = config.get('CELERY_BROKER_URL')
#if broker[:4] == 'file':
#    bq = tempfile.mkdtemp()
#    bp = tempfile.mkdtemp()
#    conf_update = {
#            'broker_url': broker,
#            'broker_transport_options': {
#                'data_folder_in': bq,
#                'data_folder_out': bq,
#                'data_folder_processed': bp,
#                },
#            }
#    if config.true('CELERY_DEBUG'):
#        conf_update['result_extended'] = True
#    current_app.conf.update(conf_update)
#    logg.warning('celery broker dirs queue i/o {} processed {}, will NOT be deleted on shutdown'.format(bq, bp))
#else:
#    conf_update = {
#            'broker_url': broker,
#            }
#    if config.true('CELERY_DEBUG'):
#        conf_update['result_extended'] = True
#    current_app.conf.update(conf_update)
#
#result = config.get('CELERY_RESULT_URL')
#if result[:4] == 'file':
#    rq = tempfile.mkdtemp()
#    current_app.conf.update({
#        'result_backend': 'file://{}'.format(rq),
#        })
#    logg.warning('celery backend store dir {} created, will NOT be deleted on shutdown'.format(rq))
#else:
#    current_app.conf.update({
#        'result_backend': result,
#        })
#
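A brief usage sketch of the RPC factory above; the provider URLs are placeholders, and with use_signer=True the signer endpoint is registered under its own label so callers can pick either connection by name.

# sketch: register default and signer connections from config values
from cic_eth.cli.rpc import RPC

config = {
    'CHAIN_SPEC': 'evm:bloxberg:8996',
    'RPC_HTTP_PROVIDER': 'http://localhost:8545',  # placeholder
    'SIGNER_PROVIDER': 'http://localhost:8000',    # placeholder signer endpoint
    }

rpc = RPC.from_config(config, use_signer=True)

conn = rpc.get_default()                  # connection registered under 'default'
signer_conn = rpc.get_by_label('signer')  # signer connection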
5
apps/cic-eth/cic_eth/data/config/celery.ini
Normal file
5
apps/cic-eth/cic_eth/data/config/celery.ini
Normal file
@@ -0,0 +1,5 @@
|
||||
[celery]
|
||||
broker_url = redis://localhost:6379
|
||||
result_url =
|
||||
queue = cic-eth
|
||||
debug = 0
|
||||
@@ -1,8 +1,6 @@
|
||||
[cic]
|
||||
registry_address =
|
||||
chain_spec = evm:bloxberg:8996
|
||||
tx_retry_delay =
|
||||
trust_address =
|
||||
default_token_symbol = GFT
|
||||
default_token_symbol =
|
||||
health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas
|
||||
run_dir = /run
|
||||
10
apps/cic-eth/cic_eth/data/config/database.ini
Normal file
10
apps/cic-eth/cic_eth/data/config/database.ini
Normal file
@@ -0,0 +1,10 @@
|
||||
[database]
|
||||
engine =
|
||||
driver =
|
||||
host =
|
||||
port =
|
||||
name =
|
||||
user =
|
||||
password =
|
||||
debug = 0
|
||||
pool_size = 0
|
||||
@@ -1,2 +1,2 @@
|
||||
[SYNCER]
|
||||
[dispatcher]
|
||||
loop_interval = 1
|
||||
@@ -1,3 +1,2 @@
|
||||
[eth]
|
||||
provider = http://localhost:8545
|
||||
gas_gifter_minimum_balance = 10000000000000000000000
|
||||
@@ -1,4 +1,5 @@
|
||||
[redis]
|
||||
host = localhost
|
||||
port = 63379
|
||||
port = 6379
|
||||
db = 0
|
||||
timeout = 20.0
|
||||
3
apps/cic-eth/cic_eth/data/config/retry.ini
Normal file
3
apps/cic-eth/cic_eth/data/config/retry.ini
Normal file
@@ -0,0 +1,3 @@
|
||||
[retry]
|
||||
delay =
|
||||
batch_size =
|
||||
2
apps/cic-eth/cic_eth/data/config/signer.ini
Normal file
2
apps/cic-eth/cic_eth/data/config/signer.ini
Normal file
@@ -0,0 +1,2 @@
|
||||
[signer]
|
||||
provider =
|
||||
4
apps/cic-eth/cic_eth/data/config/syncer.ini
Normal file
4
apps/cic-eth/cic_eth/data/config/syncer.ini
Normal file
@@ -0,0 +1,4 @@
|
||||
[syncer]
|
||||
loop_interval = 1
|
||||
offset = 0
|
||||
no_history = 0
|
||||
@@ -8,7 +8,8 @@ Create Date: 2021-04-02 18:30:55.398388
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
from chainqueue.db.migrations.sqlalchemy import (
|
||||
#from chainqueue.db.migrations.sqlalchemy import (
|
||||
from chainqueue.db.migrations.default.export import (
|
||||
chainqueue_upgrade,
|
||||
chainqueue_downgrade,
|
||||
)
|
||||
|
||||
@@ -8,7 +8,8 @@ Create Date: 2021-04-02 18:36:44.459603
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
from chainsyncer.db.migrations.sqlalchemy import (
|
||||
#from chainsyncer.db.migrations.sqlalchemy import (
|
||||
from chainsyncer.db.migrations.default.export import (
|
||||
chainsyncer_upgrade,
|
||||
chainsyncer_downgrade,
|
||||
)
|
||||
|
||||
@@ -126,3 +126,4 @@ class SessionBase(Model):
|
||||
logg.debug('commit and destroy session {}'.format(session_key))
|
||||
session.commit()
|
||||
session.close()
|
||||
del SessionBase.localsessions[session_key]
|
||||
|
||||
@@ -23,7 +23,7 @@ from chainlib.error import JSONRPCException
|
||||
from eth_accounts_index.registry import AccountRegistry
|
||||
from eth_accounts_index import AccountsIndex
|
||||
from sarafu_faucet import MinterFaucet
|
||||
from chainqueue.db.models.tx import TxCache
|
||||
from chainqueue.sql.tx import cache_tx_dict
|
||||
|
||||
# local import
|
||||
from cic_eth_registry import CICRegistry
|
||||
@@ -300,20 +300,17 @@ def cache_gift_data(
|
||||
|
||||
session = self.create_session()
|
||||
|
||||
tx_cache = TxCache(
|
||||
tx_hash_hex,
|
||||
tx['from'],
|
||||
tx['to'],
|
||||
ZERO_ADDRESS,
|
||||
ZERO_ADDRESS,
|
||||
0,
|
||||
0,
|
||||
session=session,
|
||||
)
|
||||
tx_dict = {
|
||||
'hash': tx_hash_hex,
|
||||
'from': tx['from'],
|
||||
'to': tx['to'],
|
||||
'source_token': ZERO_ADDRESS,
|
||||
'destination_token': ZERO_ADDRESS,
|
||||
'from_value': 0,
|
||||
'to_value': 0,
|
||||
}
|
||||
|
||||
session.add(tx_cache)
|
||||
session.commit()
|
||||
cache_id = tx_cache.id
|
||||
(tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
|
||||
session.close()
|
||||
return (tx_hash_hex, cache_id)
|
||||
|
||||
@@ -342,18 +339,15 @@ def cache_account_data(
|
||||
tx_data = AccountsIndex.parse_add_request(tx['data'])
|
||||
|
||||
session = SessionBase.create_session()
|
||||
tx_cache = TxCache(
|
||||
tx_hash_hex,
|
||||
tx['from'],
|
||||
tx['to'],
|
||||
ZERO_ADDRESS,
|
||||
ZERO_ADDRESS,
|
||||
0,
|
||||
0,
|
||||
session=session,
|
||||
)
|
||||
session.add(tx_cache)
|
||||
session.commit()
|
||||
cache_id = tx_cache.id
|
||||
tx_dict = {
|
||||
'hash': tx_hash_hex,
|
||||
'from': tx['from'],
|
||||
'to': tx['to'],
|
||||
'source_token': ZERO_ADDRESS,
|
||||
'destination_token': ZERO_ADDRESS,
|
||||
'from_value': 0,
|
||||
'to_value': 0,
|
||||
}
|
||||
(tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
|
||||
session.close()
|
||||
return (tx_hash_hex, cache_id)
|
||||
|
||||
@@ -1,385 +0,0 @@
|
||||
# standard imports
|
||||
import os
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
import celery
|
||||
import web3
|
||||
from cic_registry import CICRegistry
|
||||
from cic_registry.chain import ChainSpec
|
||||
|
||||
# local imports
|
||||
from cic_eth.db import SessionBase
|
||||
from cic_eth.db.models.convert import TxConvertTransfer
|
||||
from cic_eth.db.models.otx import Otx
|
||||
from cic_eth.db.models.tx import TxCache
|
||||
from cic_eth.eth.task import sign_and_register_tx
|
||||
from cic_eth.eth.task import create_check_gas_and_send_task
|
||||
from cic_eth.eth.token import TokenTxFactory
|
||||
from cic_eth.eth.factory import TxFactory
|
||||
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||
from cic_eth.eth.rpc import RpcClient
|
||||
|
||||
celery_app = celery.current_app
|
||||
#logg = celery_app.log.get_default_logger()
|
||||
logg = logging.getLogger()
|
||||
|
||||
contract_function_signatures = {
|
||||
'convert': 'f3898a97',
|
||||
'convert2': '569706eb',
|
||||
}
|
||||
|
||||
|
||||
class BancorTxFactory(TxFactory):
|
||||
|
||||
"""Factory for creating Bancor network transactions.
|
||||
"""
|
||||
def convert(
|
||||
self,
|
||||
source_token_address,
|
||||
destination_token_address,
|
||||
reserve_address,
|
||||
source_amount,
|
||||
minimum_return,
|
||||
chain_spec,
|
||||
fee_beneficiary='0x0000000000000000000000000000000000000000',
|
||||
fee_ppm=0,
|
||||
):
|
||||
"""Create a BancorNetwork "convert" transaction.
|
||||
|
||||
:param source_token_address: ERC20 contract address for token to convert from
|
||||
:type source_token_address: str, 0x-hex
|
||||
:param destination_token_address: ERC20 contract address for token to convert to
|
||||
:type destination_token_address: str, 0x-hex
|
||||
:param reserve_address: ERC20 contract address of Common reserve token
|
||||
:type reserve_address: str, 0x-hex
|
||||
:param source_amount: Amount of source tokens to convert
|
||||
:type source_amount: int
|
||||
:param minimum_return: Minimum amount of destination tokens to accept as result for conversion
|
||||
:type source_amount: int
|
||||
:return: Unsigned "convert" transaction in standard Ethereum format
|
||||
:rtype: dict
|
||||
"""
|
||||
network_contract = CICRegistry.get_contract(chain_spec, 'BancorNetwork')
|
||||
network_gas = network_contract.gas('convert')
|
||||
tx_convert_buildable = network_contract.contract.functions.convert2(
|
||||
[
|
||||
source_token_address,
|
||||
source_token_address,
|
||||
reserve_address,
|
||||
destination_token_address,
|
||||
destination_token_address,
|
||||
],
|
||||
source_amount,
|
||||
minimum_return,
|
||||
fee_beneficiary,
|
||||
fee_ppm,
|
||||
)
|
||||
tx_convert = tx_convert_buildable.buildTransaction({
|
||||
'from': self.address,
|
||||
'gas': network_gas,
|
||||
'gasPrice': self.gas_price,
|
||||
'chainId': chain_spec.chain_id(),
|
||||
'nonce': self.next_nonce(),
|
||||
})
|
||||
return tx_convert
|
||||
|
||||
|
||||
def unpack_convert(data):
|
||||
f = data[2:10]
|
||||
if f != contract_function_signatures['convert2']:
|
||||
raise ValueError('Invalid convert data ({})'.format(f))
|
||||
|
||||
d = data[10:]
|
||||
path = d[384:]
|
||||
source = path[64-40:64]
|
||||
destination = path[-40:]
|
||||
|
||||
amount = int(d[64:128], 16)
|
||||
min_return = int(d[128:192], 16)
|
||||
fee_recipient = d[192:256]
|
||||
fee = int(d[256:320], 16)
|
||||
return {
|
||||
'amount': amount,
|
||||
'min_return': min_return,
|
||||
'source_token': web3.Web3.toChecksumAddress('0x' + source),
|
||||
'destination_token': web3.Web3.toChecksumAddress('0x' + destination),
|
||||
'fee_recipient': fee_recipient,
|
||||
'fee': fee,
|
||||
}
|
||||
|
||||
|
||||
|
||||
# Kept for historical reference, it unpacks a convert call without fee parameters
|
||||
#def _unpack_convert_mint(data):
|
||||
# f = data[2:10]
|
||||
# if f != contract_function_signatures['convert2']:
|
||||
# raise ValueError('Invalid convert data ({})'.format(f))
|
||||
#
|
||||
# d = data[10:]
|
||||
# path = d[256:]
|
||||
# source = path[64-40:64]
|
||||
# destination = path[-40:]
|
||||
#
|
||||
# amount = int(d[64:128], 16)
|
||||
# min_return = int(d[128:192], 16)
|
||||
# return {
|
||||
# 'amount': amount,
|
||||
# 'min_return': min_return,
|
||||
# 'source_token': web3.Web3.toChecksumAddress('0x' + source),
|
||||
# 'destination_token': web3.Web3.toChecksumAddress('0x' + destination),
|
||||
# }
|
||||
|
||||
|
||||
@celery_app.task(bind=True)
|
||||
def convert_with_default_reserve(self, tokens, from_address, source_amount, minimum_return, to_address, chain_str):
|
||||
"""Performs a conversion between two liquid tokens using Bancor network.
|
||||
|
||||
:param tokens: Token pair, source and destination respectively
|
||||
:type tokens: list of str, 0x-hex
|
||||
:param from_address: Ethereum address of sender
|
||||
:type from_address: str, 0x-hex
|
||||
:param source_amount: Amount of source tokens to convert
|
||||
:type source_amount: int
|
||||
:param minimum_return: Minimum amount of destination tokens to receive
|
||||
:type minimum_return: int
|
||||
"""
|
||||
|
||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||
queue = self.request.delivery_info['routing_key']
|
||||
|
||||
c = RpcClient(chain_spec, holder_address=from_address)
|
||||
|
||||
cr = CICRegistry.get_contract(chain_spec, 'BancorNetwork')
|
||||
source_token = CICRegistry.get_address(chain_spec, tokens[0]['address'])
|
||||
reserve_address = CICRegistry.get_contract(chain_spec, 'BNTToken', 'ERC20').address()
|
||||
|
||||
tx_factory = TokenTxFactory(from_address, c)
|
||||
|
||||
tx_approve_zero = tx_factory.approve(source_token.address(), cr.address(), 0, chain_spec)
|
||||
(tx_approve_zero_hash_hex, tx_approve_zero_signed_hex) = sign_and_register_tx(tx_approve_zero, chain_str, queue, 'cic_eth.eth.token.otx_cache_approve')
|
||||
|
||||
tx_approve = tx_factory.approve(source_token.address(), cr.address(), source_amount, chain_spec)
|
||||
(tx_approve_hash_hex, tx_approve_signed_hex) = sign_and_register_tx(tx_approve, chain_str, queue, 'cic_eth.eth.token.otx_cache_approve')
|
||||
|
||||
tx_factory = BancorTxFactory(from_address, c)
|
||||
tx_convert = tx_factory.convert(
|
||||
tokens[0]['address'],
|
||||
tokens[1]['address'],
|
||||
reserve_address,
|
||||
source_amount,
|
||||
minimum_return,
|
||||
chain_spec,
|
||||
)
|
||||
(tx_convert_hash_hex, tx_convert_signed_hex) = sign_and_register_tx(tx_convert, chain_str, queue, 'cic_eth.eth.bancor.otx_cache_convert')
|
||||
|
||||
# TODO: consider moving save recipient to async task / chain it before the tx send
|
||||
if to_address != None:
|
||||
save_convert_recipient(tx_convert_hash_hex, to_address, chain_str)
|
||||
|
||||
s = create_check_gas_and_send_task(
|
||||
[tx_approve_zero_signed_hex, tx_approve_signed_hex, tx_convert_signed_hex],
|
||||
chain_str,
|
||||
from_address,
|
||||
tx_approve_zero['gasPrice'] * tx_approve_zero['gas'],
|
||||
tx_hashes_hex=[tx_approve_hash_hex],
|
||||
queue=queue,
|
||||
)
|
||||
s.apply_async()
|
||||
return tx_convert_hash_hex
|
||||
|
||||
|
||||
#@celery_app.task()
|
||||
#def process_approval(tx_hash_hex):
|
||||
# t = session.query(TxConvertTransfer).query(TxConvertTransfer.approve_tx_hash==tx_hash_hex).first()
|
||||
# c = session.query(Otx).query(Otx.tx_hash==t.convert_tx_hash)
|
||||
# gas_limit = 8000000
|
||||
# gas_price = GasOracle.gas_price()
|
||||
#
|
||||
# # TODO: use celery group instead
|
||||
# s_queue = celery.signature(
|
||||
# 'cic_eth.queue.tx.create',
|
||||
# [
|
||||
# nonce,
|
||||
# c['address'], # TODO: check that this is in fact sender address
|
||||
# c['tx_hash'],
|
||||
# c['signed_tx'],
|
||||
# ]
|
||||
# )
|
||||
# s_queue.apply_async()
|
||||
#
|
||||
# s_check_gas = celery.signature(
|
||||
# 'cic_eth.eth.gas.check_gas',
|
||||
# [
|
||||
# c['address'],
|
||||
# [c['signed_tx']],
|
||||
# gas_limit * gas_price,
|
||||
# ]
|
||||
# )
|
||||
# s_send = celery.signature(
|
||||
# 'cic_eth.eth.tx.send',
|
||||
# [],
|
||||
# )
|
||||
#
|
||||
# s_set_sent = celery.signature(
|
||||
# 'cic_eth.queue.state.set_sent',
|
||||
# [False],
|
||||
# )
|
||||
# s_send.link(s_set_sent)
|
||||
# s_check_gas.link(s_send)
|
||||
# s_check_gas.apply_async()
|
||||
# return tx_hash_hex
|
||||
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
def save_convert_recipient(convert_hash, recipient_address, chain_str):
|
||||
"""Registers the recipient target for a convert-and-transfer operation.
|
||||
|
||||
:param convert_hash: Transaction hash of convert operation
|
||||
:type convert_hash: str, 0x-hex
|
||||
:param recipient_address: Address of consecutive transfer recipient
|
||||
:type recipient_address: str, 0x-hex
|
||||
"""
|
||||
session = SessionBase.create_session()
|
||||
t = TxConvertTransfer(convert_hash, recipient_address, chain_str)
|
||||
session.add(t)
|
||||
session.commit()
|
||||
session.close()
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
def save_convert_transfer(convert_hash, transfer_hash):
|
||||
"""Registers that the transfer part of a convert-and-transfer operation has been executed.
|
||||
|
||||
:param convert_hash: Transaction hash of convert operation
|
||||
:type convert_hash: str, 0x-hex
|
||||
:param transfer_hash: Transaction hash of transfer operation
:type transfer_hash: str, 0x-hex
|
||||
:returns: transfer_hash,
|
||||
:rtype: list, single str, 0x-hex
|
||||
"""
|
||||
session = SessionBase.create_session()
|
||||
t = TxConvertTransfer.get(convert_hash)
|
||||
t.transfer(transfer_hash)
|
||||
session.add(t)
|
||||
session.commit()
|
||||
session.close()
|
||||
return [transfer_hash]
|
||||
|
||||
|
||||
# TODO: seems unused, consider removing
|
||||
@celery_app.task()
|
||||
def resolve_converters_by_tokens(tokens, chain_str):
|
||||
"""Return converters for a list of tokens.
|
||||
|
||||
:param tokens: Token addresses to look up
|
||||
:type tokens: list of str, 0x-hex
|
||||
:return: Addresses of matching converters
|
||||
:rtype: list of str, 0x-hex
|
||||
"""
|
||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||
for t in tokens:
|
||||
c = CICRegistry.get_contract(chain_spec, 'ConverterRegistry')
|
||||
fn = c.function('getConvertersByAnchors')
|
||||
try:
|
||||
converters = fn([t['address']]).call()
|
||||
except Exception as e:
|
||||
raise e
|
||||
t['converters'] = converters
|
||||
|
||||
return tokens
|
||||
|
||||
|
||||
@celery_app.task(bind=True)
|
||||
def transfer_converted(self, tokens, holder_address, receiver_address, value, tx_convert_hash_hex, chain_str):
|
||||
"""Execute the ERC20 transfer of a convert-and-transfer operation.
|
||||
|
||||
First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.
|
||||
|
||||
:param tokens: Token addresses
|
||||
:type tokens: list of str, 0x-hex
|
||||
:param holder_address: Token holder address
|
||||
:type holder_address: str, 0x-hex
|
||||
:param receiver_address: Token receiver address
:type receiver_address: str, 0x-hex
|
||||
:param value: Amount of token, in 'wei'
|
||||
:type value: int
|
||||
:raises TokenCountError: Either none or more than one token has been passed as the tokens argument
|
||||
:return: Transaction hash
|
||||
:rtype: str, 0x-hex
|
||||
"""
|
||||
# we only allow one token, one transfer
|
||||
if len(tokens) != 1:
|
||||
raise TokenCountError
|
||||
|
||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||
|
||||
queue = self.request.delivery_info['routing_key']
|
||||
|
||||
c = RpcClient(chain_spec, holder_address=holder_address)
|
||||
|
||||
# get transaction parameters
|
||||
gas_price = c.gas_price()
|
||||
tx_factory = TokenTxFactory(holder_address, c)
|
||||
|
||||
token_address = tokens[0]['address']
|
||||
tx_transfer = tx_factory.transfer(
|
||||
token_address,
|
||||
receiver_address,
|
||||
value,
|
||||
chain_spec,
|
||||
)
|
||||
(tx_transfer_hash_hex, tx_transfer_signed_hex) = sign_and_register_tx(tx_transfer, chain_str, queue, 'cic_eth.eth.token.otx_cache_transfer')
|
||||
|
||||
# send transaction
|
||||
logg.info('transfer converted token {} from {} to {} value {} {}'.format(token_address, holder_address, receiver_address, value, tx_transfer_signed_hex))
|
||||
s = create_check_gas_and_send_task(
|
||||
[tx_transfer_signed_hex],
|
||||
chain_str,
|
||||
holder_address,
|
||||
tx_transfer['gasPrice'] * tx_transfer['gas'],
|
||||
None,
|
||||
queue,
|
||||
)
|
||||
s_save = celery.signature(
|
||||
'cic_eth.eth.bancor.save_convert_transfer',
|
||||
[
|
||||
tx_convert_hash_hex,
|
||||
tx_transfer_hash_hex,
|
||||
],
|
||||
queue=queue,
|
||||
)
|
||||
s_save.link(s)
|
||||
s_save.apply_async()
|
||||
return tx_transfer_hash_hex
|
@celery_app.task()
def otx_cache_convert(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
    ):

    chain_spec = ChainSpec.from_chain_str(chain_str)
    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    tx = unpack(tx_signed_raw_bytes, chain_spec)
    tx_data = unpack_convert(tx['data'])
    logg.debug('tx data {}'.format(tx_data))

    session = TxCache.create_session()
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx['from'],
        tx_data['source_token'],
        tx_data['destination_token'],
        tx_data['amount'],
        tx_data['amount'],
    )
    session.add(tx_cache)
    session.commit()
    session.close()
    return tx_hash_hex

@@ -13,9 +13,9 @@ from chainlib.eth.tx import (
from cic_eth_registry import CICRegistry
from cic_eth_registry.erc20 import ERC20Token
from hexathon import strip_0x
from chainqueue.db.models.tx import TxCache
from chainqueue.error import NotLocalTxError
from eth_erc20 import ERC20
from chainqueue.sql.tx import cache_tx_dict

# local imports
from cic_eth.db.models.base import SessionBase

@@ -375,19 +375,16 @@ def cache_transfer_data(
    token_value = tx_data[1]

    session = SessionBase.create_session()
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        recipient_address,
        tx['to'],
        tx['to'],
        token_value,
        token_value,
        session=session,
    )
    session.add(tx_cache)
    session.commit()
    cache_id = tx_cache.id
    tx_dict = {
        'hash': tx_hash_hex,
        'from': tx['from'],
        'to': recipient_address,
        'source_token': tx['to'],
        'destination_token': tx['to'],
        'from_value': token_value,
        'to_value': token_value,
    }
    (tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
    session.close()
    return (tx_hash_hex, cache_id)

@@ -417,19 +414,16 @@ def cache_transfer_from_data(
    token_value = tx_data[2]

    session = SessionBase.create_session()
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        recipient_address,
        tx['to'],
        tx['to'],
        token_value,
        token_value,
        session=session,
    )
    session.add(tx_cache)
    session.commit()
    cache_id = tx_cache.id
    tx_dict = {
        'hash': tx_hash_hex,
        'from': tx['from'],
        'to': recipient_address,
        'source_token': tx['to'],
        'destination_token': tx['to'],
        'from_value': token_value,
        'to_value': token_value,
    }
    (tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
    session.close()
    return (tx_hash_hex, cache_id)

@@ -458,19 +452,16 @@ def cache_approve_data(
    token_value = tx_data[1]

    session = SessionBase.create_session()
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        recipient_address,
        tx['to'],
        tx['to'],
        token_value,
        token_value,
        session=session,
    )
    session.add(tx_cache)
    session.commit()
    cache_id = tx_cache.id
    tx_dict = {
        'hash': tx_hash_hex,
        'from': tx['from'],
        'to': recipient_address,
        'source_token': tx['to'],
        'destination_token': tx['to'],
        'from_value': token_value,
        'to_value': token_value,
    }
    (tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
    session.close()
    return (tx_hash_hex, cache_id)


@@ -9,6 +9,7 @@ from chainlib.chain import ChainSpec
from chainlib.eth.address import is_checksum_address
from chainlib.connection import RPCConnection
from chainqueue.db.enum import StatusBits
from chainqueue.sql.tx import cache_tx_dict
from chainlib.eth.gas import (
    balance,
    price,
@@ -133,20 +134,17 @@ def cache_gas_data(

    session = SessionBase.create_session()

    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx['to'],
        ZERO_ADDRESS,
        ZERO_ADDRESS,
        tx['value'],
        tx['value'],
        session=session,
    )
    tx_dict = {
        'hash': tx_hash_hex,
        'from': tx['from'],
        'to': tx['to'],
        'source_token': ZERO_ADDRESS,
        'destination_token': ZERO_ADDRESS,
        'from_value': tx['value'],
        'to_value': tx['value'],
    }

    session.add(tx_cache)
    session.commit()
    cache_id = tx_cache.id
    (tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
    session.close()
    return (tx_hash_hex, cache_id)
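Note: the four cache_* hunks above make the same change: the hand-built TxCache row and its session.add()/session.commit() bookkeeping are replaced by one call to chainqueue's cache_tx_dict() helper. A minimal sketch of the resulting pattern, using only names that appear in this diff (the Celery task plumbing and input decoding are omitted, and the function name here is illustrative):

    # Illustrative sketch only; mirrors the rewritten cache_transfer_data above.
    from chainqueue.sql.tx import cache_tx_dict
    from cic_eth.db.models.base import SessionBase

    def cache_transfer_sketch(tx_hash_hex, tx, recipient_address, token_value):
        session = SessionBase.create_session()
        tx_dict = {
            'hash': tx_hash_hex,
            'from': tx['from'],
            'to': recipient_address,
            'source_token': tx['to'],
            'destination_token': tx['to'],
            'from_value': token_value,
            'to_value': token_value,
        }
        # the helper persists the row and returns the generated cache id
        (tx_dict, cache_id) = cache_tx_dict(tx_dict, session=session)
        session.close()
        return (tx_hash_hex, cache_id)

The helper owns the add/commit cycle, so the tasks no longer read tx_cache.id directly.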
@@ -18,7 +18,6 @@ from hexathon import (
    strip_0x,
)
from chainqueue.db.models.tx import Otx
from chainqueue.db.models.tx import TxCache
from chainqueue.db.enum import StatusBits
from chainqueue.error import NotLocalTxError
from potaahto.symbols import snake_and_camel

@@ -12,15 +12,21 @@ from chainlib.eth.tx import (
    transaction_by_block,
    receipt,
)
from chainlib.eth.error import RequestMismatchException
from chainlib.eth.block import block_by_number
from chainlib.eth.contract import abi_decode_single
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.eth.tx import Tx
from hexathon import strip_0x
from cic_eth_registry import CICRegistry
from cic_eth_registry.erc20 import ERC20Token
from cic_eth_registry.error import UnknownContractError
from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusEnum
from chainqueue.sql.query import get_tx_cache
from eth_erc20 import ERC20
from erc20_faucet import Faucet
from potaahto.symbols import snake_and_camel

# local imports
from cic_eth.queue.time import tx_times
@@ -33,6 +39,32 @@ logg = logging.getLogger()
MAX_BLOCK_TX = 250


def parse_transaction(chain_spec, rpc, tx, sender_address=None):
    try:
        transfer_data = ERC20.parse_transfer_request(tx['input'])
        tx_address = transfer_data[0]
        tx_token_value = transfer_data[1]
        logg.debug('matched transfer transaction {} in block {} sender {} recipient {} value {}'.format(tx['hash'], tx['block_number'], tx['from'], tx_address, tx_token_value))
        return (tx_address, tx_token_value)
    except RequestMismatchException:
        pass

    try:
        transfer_data = Faucet.parse_give_to_request(tx['input'])
        tx_address = transfer_data[0]
        c = Faucet(chain_spec)
        o = c.token_amount(tx['to'], sender_address=sender_address, height=tx['block_number'])
        r = rpc.do(o)
        tx_token_value = Faucet.parse_token_amount(r)
        logg.debug('matched giveto transaction {} in block {} sender {} recipient {} value {}'.format(tx['hash'], tx['block_number'], tx['from'], tx_address, tx_token_value))
        return (tx_address, tx_token_value)

    except RequestMismatchException:
        pass

    return None
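Note: the new parse_transaction() helper above decodes a transaction input by first trying an ERC20 transfer and then a faucet give_to call, returning None for anything else. A rough usage sketch, matching how list_tx_by_bloom calls it further down (the tx dict shape is assumed from the diff):

    # Sketch: a None result means "not a token movement", so the caller skips the tx.
    transfer_data = parse_transaction(chain_spec, rpc, tx, sender_address=BaseTask.call_address)
    if transfer_data is not None:
        (tx_address, tx_token_value) = transfer_data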
# TODO: Make this method easier to read
@celery_app.task(bind=True, base=BaseTask)
def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
@@ -69,36 +101,39 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
    tx_filter = moolb.Bloom(databitlen, bloomspec['filter_rounds'], default_data=tx_filter_data)

    txs = {}
    logg.debug('processing filter with span low {} to high {}'.format(bloomspec['low'], bloomspec['high']))
    for block_height in range(bloomspec['low'], bloomspec['high']):
        block_height_bytes = block_height.to_bytes(4, 'big')
        if block_filter.check(block_height_bytes):
            logg.debug('filter matched block {}'.format(block_height))
            o = block_by_number(block_height)
            block = rpc.do(o)
            logg.debug('block {}'.format(block))

            for tx_index in range(0, len(block['transactions'])):
                composite = tx_index + block_height
                tx_index_bytes = composite.to_bytes(4, 'big')
                if tx_filter.check(tx_index_bytes):
                tx_index_bytes = tx_index.to_bytes(4, 'big')
                composite = block_height_bytes + tx_index_bytes
                if tx_filter.check(composite):
                    logg.debug('filter matched block {} tx {}'.format(block_height, tx_index))

                    o = transaction_by_block(block['hash'], tx_index)
                    try:
                        #tx = c.w3.eth.getTransactionByBlock(block_height, tx_index)
                        o = transaction_by_block(block['hash'], tx_index)
                        tx = rpc.do(o)
                    except Exception as e:
                        logg.debug('false positive on block {} tx {} ({})'.format(block_height, tx_index, e))
                        continue

                    tx = Tx(tx).src()

                    logg.debug('got tx {}'.format(tx))
                    tx_address = None
                    tx_token_value = 0
                    try:
                        transfer_data = ERC20.parse_transfer_request(tx['data'])
                        tx_address = transfer_data[0]
                        tx_token_value = transfer_data[1]
                    except ValueError:
                        logg.debug('not a transfer transaction, skipping {}'.format(tx))

                    transfer_data = parse_transaction(chain_spec, rpc, tx, sender_address=BaseTask.call_address)
                    if transfer_data == None:
                        continue
                    tx_address = transfer_data[0]
                    tx_token_value = transfer_data[1]

                    if address == tx_address:
                        status = StatusEnum.SENT
                        try:
@@ -114,9 +149,6 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):

                    # TODO: pass through registry to validate declarator entry of token
                    #token = registry.by_address(tx['to'], sender_address=self.call_address)
                    token = ERC20Token(chain_spec, rpc, tx['to'])
                    token_symbol = token.symbol
                    token_decimals = token.decimals
                    times = tx_times(tx['hash'], chain_spec)
                    tx_r = {
                        'hash': tx['hash'],
@@ -126,12 +158,6 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
                        'destination_value': tx_token_value,
                        'source_token': tx['to'],
                        'destination_token': tx['to'],
                        'source_token_symbol': token_symbol,
                        'destination_token_symbol': token_symbol,
                        'source_token_decimals': token_decimals,
                        'destination_token_decimals': token_decimals,
                        'source_token_chain': chain_str,
                        'destination_token_chain': chain_str,
                        'nonce': tx['nonce'],
                    }
                    if times['queue'] != None:
@@ -143,11 +169,12 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
    return txs

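Note on the bloom-filter hunks above: the old per-transaction key added the transaction index to the block height as plain integers, so different (block, index) pairs could hash to the same key; the new key concatenates the 4-byte big-endian block height with the 4-byte transaction index, which keeps pairs distinct, assuming the filter was populated with the same 8-byte encoding. A small worked example of the collision the change avoids:

    # Old scheme: the integer sum collides across neighbouring blocks
    assert 10 + 2 == 11 + 1
    # New scheme: the 8-byte composite keys differ
    key_a = (10).to_bytes(4, 'big') + (2).to_bytes(4, 'big')
    key_b = (11).to_bytes(4, 'big') + (1).to_bytes(4, 'big')
    assert key_a != key_b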
# TODO: Surely it must be possible to optimize this
# TODO: DRY this with callback filter in cic_eth/runnable/manager
# TODO: Remove redundant fields from end representation (timestamp, tx_hash)
@celery_app.task()
def tx_collate(tx_batches, chain_spec_dict, offset, limit, newest_first=True):
@celery_app.task(bind=True, base=BaseTask)
def tx_collate(self, tx_batches, chain_spec_dict, offset, limit, newest_first=True, verify_contracts=True):
    """Merges transaction data from multiple sources and sorts them in chronological order.

    :param tx_batches: Transaction data inputs
@@ -196,6 +223,32 @@ def tx_collate(tx_batches, chain_spec_dict, offset, limit, newest_first=True):
    if newest_first:
        ks.reverse()
    for k in ks:
        txs.append(txs_by_block[k])
        tx = txs_by_block[k]
        if verify_contracts:
            try:
                tx = verify_and_expand(tx, chain_spec, sender_address=BaseTask.call_address)
            except UnknownContractError:
                logg.error('verify failed on tx {}, skipping'.format(tx['hash']))
                continue
        txs.append(tx)

    return txs


def verify_and_expand(tx, chain_spec, sender_address=ZERO_ADDRESS):
    rpc = RPCConnection.connect(chain_spec, 'default')
    registry = CICRegistry(chain_spec, rpc)

    if tx.get('source_token_symbol') == None and tx['source_token'] != ZERO_ADDRESS:
        r = registry.by_address(tx['source_token'], sender_address=sender_address)
        token = ERC20Token(chain_spec, rpc, tx['source_token'])
        tx['source_token_symbol'] = token.symbol
        tx['source_token_decimals'] = token.decimals

    if tx.get('destination_token_symbol') == None and tx['destination_token'] != ZERO_ADDRESS:
        r = registry.by_address(tx['destination_token'], sender_address=sender_address)
        token = ERC20Token(chain_spec, rpc, tx['destination_token'])
        tx['destination_token_symbol'] = token.symbol
        tx['destination_token_decimals'] = token.decimals

    return tx

@@ -27,7 +27,7 @@ def database_engine(
    SessionBase.poolable = False
    dsn = dsn_from_config(load_config)
    #SessionBase.connect(dsn, True)
    SessionBase.connect(dsn, debug=load_config.get('DATABASE_DEBUG') != None)
    SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
    return dsn


@@ -100,6 +100,7 @@ def get_upcoming_tx(chain_spec, status=StatusEnum.READYSEND, not_status=None, re
    q_outer = q_outer.join(Lock, isouter=True)
    q_outer = q_outer.filter(or_(Lock.flags==None, Lock.flags.op('&')(LockEnum.SEND.value)==0))

    if not is_alive(status):
        SessionBase.release_session(session)
        raise ValueError('not a valid non-final tx value: {}'.format(status))

@@ -7,54 +7,30 @@ import json
import argparse

# external imports
import celery
import confini
import redis
from xdg.BaseDirectory import xdg_config_home
from chainlib.chain import ChainSpec

# local imports
import cic_eth.cli
from cic_eth.api import Api

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger('create_account_script')
logging.getLogger('confini').setLevel(logging.WARNING)
logging.getLogger('gnupg').setLevel(logging.WARNING)
logg = logging.getLogger()

default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')

argparser = argparse.ArgumentParser()
arg_flags = cic_eth.cli.argflag_std_base
local_arg_flags = cic_eth.cli.argflag_local_taskcallback
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.add_argument('--no-register', dest='no_register', action='store_true', help='Do not register new account in on-chain accounts index')
argparser.add_argument('-c', type=str, default=default_config_dir, help='config file')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
argparser.add_argument('--redis-host-callback', dest='redis_host_callback', default='localhost', type=str, help='redis host to use for callback')
argparser.add_argument('--redis-port-callback', dest='redis_port_callback', default=6379, type=int, help='redis port to use for callback')
argparser.add_argument('--timeout', default=20.0, type=float, help='Callback timeout')
argparser.add_argument('-q', type=str, default='cic-eth', help='Task queue')
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()

if args.vv:
    logg.setLevel(logging.DEBUG)
if args.v:
    logg.setLevel(logging.INFO)

config_dir = args.c
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
config.process()
args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
    'REDIS_HOST': getattr(args, 'redis_host'),
    'REDIS_PORT': getattr(args, 'redis_port'),
    'REDIS_DB': getattr(args, 'redis_db'),
extra_args = {
    'no_register': None,
}
config.dict_override(args_override, 'cli')

celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)

celery_app = cic_eth.cli.CeleryApp.from_config(config)

def main():
    redis_host = config.get('REDIS_HOST')
@@ -68,19 +44,24 @@ def main():
    ps.get_message()

    api = Api(
        config.get('CIC_CHAIN_SPEC'),
        queue=args.q,
        callback_param='{}:{}:{}:{}'.format(args.redis_host_callback, args.redis_port_callback, redis_db, redis_channel),
        config.get('CHAIN_SPEC'),
        queue=config.get('CELERY_QUEUE'),
        callback_param='{}:{}:{}:{}'.format(config.get('_REDIS_HOST_CALLBACK'), config.get('_REDIS_PORT_CALLBACK'), config.get('REDIS_DB'), redis_channel),
        callback_task='cic_eth.callbacks.redis.redis',
        callback_queue=args.q,
        callback_queue=config.get('CELERY_QUEUE'),
    )

    register = not args.no_register
    register = not config.get('_NO_REGISTER')
    logg.debug('register {}'.format(register))
    t = api.create_account(register=register)

    ps.get_message()
    o = ps.get_message(timeout=args.timeout)
    try:
        o = ps.get_message(timeout=config.get('REDIS_TIMEOUT'))
    except TimeoutError as e:
        sys.stderr.write('got no new address from cic-eth before timeout: {}\n'.format(e))
        sys.exit(1)
    ps.unsubscribe()
    m = json.loads(o['data'])
    print(m['result'])
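Note: the runnable scripts in this changeset converge on the same bootstrap shape, dropping hand-rolled confini/argparse/Celery setup in favour of the cic_eth.cli helpers. A rough sketch of the shared pattern, built only from calls visible in this diff (the flag constants shown are examples; each script picks its own):

    import cic_eth.cli

    arg_flags = cic_eth.cli.argflag_std_base
    local_arg_flags = cic_eth.cli.argflag_local_taskcallback
    argparser = cic_eth.cli.ArgumentParser(arg_flags)
    argparser.process_local_flags(local_arg_flags)
    args = argparser.parse_args()

    # merges config files, environment and cli flags; script-only flags go in extra_args
    config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args={'no_register': None})

    # the celery app (and, where needed, the rpc) is then built from the same config object
    celery_app = cic_eth.cli.CeleryApp.from_config(config)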
@@ -12,64 +12,38 @@ from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.eth.address import is_checksum_address

# local imports
import cic_eth.cli
from cic_eth.api.admin import AdminApi
from cic_eth.db.enum import LockEnum

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

default_format = 'terminal'
default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')

argparser = argparse.ArgumentParser()
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
argparser.add_argument('-f', '--format', dest='f', default=default_format, type=str, help='Output format')
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_task | cic_eth.cli.argflag_local_chain
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.add_argument('--no-register', dest='no_register', action='store_true', help='Do not register new account in on-chain accounts index')
argparser.process_local_flags(local_arg_flags)

def process_lock_args(argparser):
    argparser.add_argument('flags', type=str, help='Flags to manipulate')
    argparser.add_argument('address', default=ZERO_ADDRESS, nargs='?', type=str, help='Ethereum address to unlock,')

sub = argparser.add_subparsers()
sub = argparser.add_subparsers(help='')
sub.dest = "command"
sub_lock = sub.add_parser('lock', help='Set or reset locks')
sub_unlock = sub.add_parser('unlock', help='Set or reset locks')
process_lock_args(sub_lock)
process_lock_args(sub_unlock)

args = argparser.parse_args()

if args.v == True:
    logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
    logging.getLogger().setLevel(logging.DEBUG)
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)

config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
args_override = {
    'ETH_PROVIDER': getattr(args, 'p'),
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
# override args
config.dict_override(args_override, 'cli')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
celery_app = cic_eth.cli.CeleryApp.from_config(config)

celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))

queue = args.q

chain_spec = None
if config.get('CIC_CHAIN_SPEC') != None and config.get('CIC_CHAIN_SPEC') != '::':
    chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
admin_api = AdminApi(None)


@@ -100,7 +74,7 @@ def main():
            args.address,
            flags,
        ],
        queue=queue,
        queue=config.get('CELERY_QUEUE'),
    )
    t = s.apply_async()
    logg.debug('unlock {} on {} task {}'.format(flags, args.address, t))
@@ -119,7 +93,7 @@ def main():
            args.address,
            flags,
        ],
        queue=queue,
        queue=config.get('CELERY_QUEUE'),
    )
    t = s.apply_async()
    logg.debug('lock {} on {} task {}'.format(flags, args.address, t))

@@ -8,8 +8,7 @@ import sys
import re
import datetime

# third-party imports
import confini
# external imports
import celery
from cic_eth_registry import CICRegistry
from chainlib.chain import ChainSpec
@@ -24,7 +23,7 @@ from chainqueue.error import NotLocalTxError
from chainqueue.sql.state import set_reserved

# local imports
import cic_eth
import cic_eth.cli
from cic_eth.db import SessionBase
from cic_eth.db.enum import LockEnum
from cic_eth.db import dsn_from_config
@@ -39,57 +38,37 @@ from cic_eth.error import (
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_sync | cic_eth.cli.argflag_local_task
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()

config_dir = os.path.join('/usr/local/etc/cic-eth')
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)

argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-p', '--provider', default='http://localhost:8545', dest='p', type=str, help='rpc provider')
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
args = argparser.parse_args(sys.argv[1:])

if args.v == True:
    logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
    logging.getLogger().setLevel(logging.DEBUG)

config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
# override args
args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
    'ETH_PROVIDER': getattr(args, 'p'),
}
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))

app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))

queue = args.q
# connect to celery
celery_app = cic_eth.cli.CeleryApp.from_config(config)

# connect to database
dsn = dsn_from_config(config)
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))

chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))

RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, tag='default')
# set up rpc
rpc = cic_eth.cli.RPC.from_config(config)
conn = rpc.get_default()

run = True


class DispatchSyncer:

    yield_delay = 0.0005

    def __init__(self, chain_spec):
        self.chain_spec = chain_spec
        self.session = None


    def chain(self):
@@ -100,16 +79,18 @@ class DispatchSyncer:
        c = len(txs.keys())
        logg.debug('processing {} txs {}'.format(c, list(txs.keys())))
        chain_str = str(self.chain_spec)
        session = SessionBase.create_session()
        self.session = SessionBase.create_session()
        for k in txs.keys():
            tx_raw = txs[k]
            tx_raw_bytes = bytes.fromhex(strip_0x(tx_raw))
            tx = unpack(tx_raw_bytes, self.chain_spec)

            try:
                set_reserved(self.chain_spec, tx['hash'], session=session)
                set_reserved(self.chain_spec, tx['hash'], session=self.session)
                self.session.commit()
            except NotLocalTxError as e:
                logg.warning('dispatcher was triggered with non-local tx {}'.format(tx['hash']))
                self.session.rollback()
                continue

            s_check = celery.signature(
@@ -132,16 +113,25 @@ class DispatchSyncer:
        s_check.link(s_send)
        t = s_check.apply_async()
        logg.info('processed {}'.format(k))
        self.session.close()
        self.session = None


    def loop(self, w3, interval):
    def loop(self, interval):
        while run:
            txs = {}
            typ = StatusBits.QUEUED
            utxs = get_upcoming_tx(self.chain_spec, typ)
            for k in utxs.keys():
                txs[k] = utxs[k]
            self.process(w3, txs)
            try:
                conn = RPCConnection.connect(self.chain_spec, 'default')
                self.process(conn, txs)
            except ConnectionError as e:
                if self.session != None:
                    self.session.close()
                    self.session = None
                logg.error('connection to node failed: {}'.format(e))

            if len(utxs) > 0:
                time.sleep(self.yield_delay)
@@ -151,8 +141,7 @@ class DispatchSyncer:

def main():
    syncer = DispatchSyncer(chain_spec)
    conn = RPCConnection.connect(chain_spec, 'default')
    syncer.loop(conn, float(config.get('DISPATCHER_LOOP_INTERVAL')))
    syncer.loop(float(config.get('DISPATCHER_LOOP_INTERVAL')))

    sys.exit(0)
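Note: the dispatcher now keeps its database session on the instance and opens the RPC connection inside the loop, so a failed node connection is logged and cleaned up instead of crashing the daemon. A simplified sketch of the loop's new error handling, using only names from the diff:

    # Simplified; the real loop also copies utxs into txs and honours yield_delay.
    while run:
        utxs = get_upcoming_tx(chain_spec, StatusBits.QUEUED)
        try:
            conn = RPCConnection.connect(chain_spec, 'default')
            syncer.process(conn, utxs)
        except ConnectionError as e:
            # drop any half-open session so the next iteration starts clean
            if syncer.session is not None:
                syncer.session.close()
                syncer.session = None
            logg.error('connection to node failed: {}'.format(e))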
@@ -11,6 +11,7 @@ from chainqueue.db.enum import StatusBits
from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx
from chainqueue.sql.query import get_paused_tx_cache as get_paused_tx
from chainlib.eth.address import to_checksum_address

# local imports
from cic_eth.db.models.base import SessionBase
@@ -47,12 +48,13 @@ class GasFilter(SyncFilter):

        SessionBase.release_session(session)

        address = to_checksum_address(r[0])
        logg.info('resuming gas-in-waiting txs for {}'.format(r[0]))
        if len(txs) > 0:
            s = create_check_gas_task(
                list(txs.values()),
                self.chain_spec,
                r[0],
                address,
                0,
                tx_hashes_hex=list(txs.keys()),
                queue=self.queue,

@@ -6,7 +6,6 @@ import argparse
import re

# external imports
import confini
import celery
from cic_eth_registry import CICRegistry
from chainlib.chain import ChainSpec
@@ -14,6 +13,7 @@ from chainlib.connection import RPCConnection
from chainsyncer.filter import SyncFilter

# local imports
import cic_eth.cli
from cic_eth.db import dsn_from_config
from cic_eth.db import SessionBase
from cic_eth.admin.ctrl import lock_send
@@ -25,66 +25,41 @@ from cic_eth.stat import init_chain_stat
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

config_dir = os.path.join('/usr/local/etc/cic-eth')

argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-p', '--provider', dest='p', type=str, help='rpc provider')
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_sync | cic_eth.cli.argflag_local_task
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.add_argument('--batch-size', dest='batch_size', type=int, default=50, help='max amount of txs to resend per iteration')
argparser.add_argument('--retry-delay', dest='retry_delay', type=int, help='seconds to wait for retrying a transaction that is marked as sent')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
args = argparser.parse_args(sys.argv[1:])
argparser.add_argument('--retry-delay', dest='retry_delay', type=int, default=20, help='seconds to wait for retrying a transaction that is marked as sent')
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()


if args.v == True:
    logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
    logging.getLogger().setLevel(logging.DEBUG)

config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
# override args
args_override = {
    'ETH_PROVIDER': getattr(args, 'p'),
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
    'CIC_TX_RETRY_DELAY': getattr(args, 'retry_delay'),
extra_args = {
    'retry_delay': 'RETRY_DELAY',
    'batch_size': 'RETRY_BATCH_SIZE',
}
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
config.add(args.batch_size, '_BATCH_SIZE', True)

app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))

queue = args.q

chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))

RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, tag='default')
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)

# connect to database
dsn = dsn_from_config(config)
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))

chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))

# set up rpc
rpc = cic_eth.cli.RPC.from_config(config)
conn = rpc.get_default()


def main():
    conn = RPCConnection.connect(chain_spec, 'default')

    straggler_delay = int(config.get('CIC_TX_RETRY_DELAY'))
    straggler_delay = int(config.get('RETRY_DELAY'))
    loop_interval = config.get('SYNCER_LOOP_INTERVAL')
    if loop_interval == None:
        stat = init_chain_stat(conn)
        loop_interval = stat.block_average()

    syncer = RetrySyncer(conn, chain_spec, straggler_delay, batch_size=config.get('_BATCH_SIZE'))
    syncer = RetrySyncer(conn, chain_spec, cic_eth.cli.chain_interface, straggler_delay, batch_size=config.get('RETRY_BATCH_SIZE'))
    syncer.backend.set(0, 0)
    fltr = StragglerFilter(chain_spec, queue=queue)
    fltr = StragglerFilter(chain_spec, queue=config.get('CELERY_QUEUE'))
    syncer.add_filter(fltr)
    syncer.loop(int(loop_interval), conn)
@@ -21,14 +21,17 @@ from chainlib.eth.connection (
    EthUnixSignerConnection,
    EthHTTPSignerConnection,
)
from chainlib.eth.address import to_checksum_address
from chainlib.chain import ChainSpec
from chainqueue.db.models.otx import Otx
from cic_eth_registry.error import UnknownContractError
from cic_eth_registry.erc20 import ERC20Token
from hexathon import add_0x
import liveness.linux


# local imports
import cic_eth.cli
from cic_eth.eth import (
    erc20,
    tx,
@@ -70,114 +73,53 @@ from cic_eth.task import BaseTask
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

script_dir = os.path.dirname(os.path.realpath(__file__))

config_dir = os.path.join('/usr/local/etc/cic-eth')

argparser = argparse.ArgumentParser()
argparser.add_argument('-p', '--provider', dest='p', type=str, help='rpc provider')
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
argparser.add_argument('-r', type=str, help='CIC registry address')
arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_task
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use')
argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to perist all queue entry status changes to storage')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--aux-all', action='store_true', help='include tasks from all submodules from the aux module path')
argparser.add_argument('--aux', action='append', type=str, default=[], help='add single submodule from the aux module path')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(args.c, args.env_prefix)
config.process()
# override args
args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
    'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
    'CIC_DEFAULT_TOKEN_SYMBOL': getattr(args, 'default_token_symbol'),
    'ETH_PROVIDER': getattr(args, 'p'),
    'TASKS_TRACE_QUEUE_STATUS': getattr(args, 'trace_queue_status'),
# process config
extra_args = {
    'default_token_symbol': 'CIC_DEFAULT_TOKEN_SYMBOL',
    'aux_all': None,
    'aux': None,
    'trace_queue_status': 'TASKS_TRACE_QUEUE_STATUS',
}
config.add(args.q, '_CELERY_QUEUE', True)
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(args.c, config))
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)

health_modules = config.get('CIC_HEALTH_MODULES', [])
if len(health_modules) != 0:
    health_modules = health_modules.split(',')
logg.debug('health mods {}'.format(health_modules))
# connect to celery
celery_app = cic_eth.cli.CeleryApp.from_config(config)

# set up rpc
rpc = cic_eth.cli.RPC.from_config(config, use_signer=True)
conn = rpc.get_default()


# connect to database
dsn = dsn_from_config(config)
SessionBase.connect(dsn, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))


# set up celery
current_app = celery.Celery(__name__)

broker = config.get('CELERY_BROKER_URL')
if broker[:4] == 'file':
    bq = tempfile.mkdtemp()
    bp = tempfile.mkdtemp()
    conf_update = {
        'broker_url': broker,
        'broker_transport_options': {
            'data_folder_in': bq,
            'data_folder_out': bq,
            'data_folder_processed': bp,
        },
    }
    if config.true('CELERY_DEBUG'):
        conf_update['result_extended'] = True
    current_app.conf.update(conf_update)
    logg.warning('celery broker dirs queue i/o {} processed {}, will NOT be deleted on shutdown'.format(bq, bp))
else:
    conf_update = {
        'broker_url': broker,
    }
    if config.true('CELERY_DEBUG'):
        conf_update['result_extended'] = True
    current_app.conf.update(conf_update)

result = config.get('CELERY_RESULT_URL')
if result[:4] == 'file':
    rq = tempfile.mkdtemp()
    current_app.conf.update({
        'result_backend': 'file://{}'.format(rq),
    })
    logg.warning('celery backend store dir {} created, will NOT be deleted on shutdown'.format(rq))
else:
    current_app.conf.update({
        'result_backend': result,
    })

chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, 'signer')
RPCConnection.register_constructor(ConnType.HTTP, EthHTTPSignerConnection, 'signer')
RPCConnection.register_constructor(ConnType.HTTP_SSL, EthHTTPSignerConnection, 'signer')
RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
RPCConnection.register_location(config.get('SIGNER_SOCKET_PATH'), chain_spec, 'signer')

Otx.tracing = config.true('TASKS_TRACE_QUEUE_STATUS')

#import cic_eth.checks.gas
#if not cic_eth.checks.gas.health(config=config):
#    raise RuntimeError()

# execute health checks
# TODO: health should be separate service with endpoint that can be queried
health_modules = config.get('CIC_HEALTH_MODULES', [])
if len(health_modules) != 0:
    health_modules = health_modules.split(',')
logg.debug('health mods {}'.format(health_modules))
liveness.linux.load(health_modules, rundir=config.get('CIC_RUN_DIR'), config=config, unit='cic-eth-tasker')

rpc = RPCConnection.connect(chain_spec, 'default')

# set up chain provisions
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
registry = None
try:
    registry = connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
    registry = connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
except UnknownContractError as e:
    logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
    sys.exit(1)
@@ -188,15 +130,15 @@ if trusted_addresses_src == None:
    logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
    sys.exit(1)
trusted_addresses = trusted_addresses_src.split(',')
for address in trusted_addresses:
for i, address in enumerate(trusted_addresses):
    if config.get('_UNSAFE'):
        trusted_addresses[i] = to_checksum_address(address)
    logg.info('using trusted address {}'.format(address))
connect_declarator(conn, chain_spec, trusted_addresses)
connect_token_registry(conn, chain_spec)

connect_declarator(rpc, chain_spec, trusted_addresses)
connect_token_registry(rpc, chain_spec)

# detect aux
# detect auxiliary task modules (plugins)
# TODO: move to separate file
#aux_dir = os.path.join(script_dir, '..', '..', 'aux')
aux = []
if args.aux_all:
    if len(args.aux) > 0:
@@ -249,36 +191,24 @@ elif len(args.aux) > 0:
for v in aux:
    mname = 'cic_eth_aux.' + v
    mod = importlib.import_module(mname)
    mod.aux_setup(rpc, config)
    mod.aux_setup(conn, config)
    logg.info('loaded aux module {}'.format(mname))


def main():
    argv = ['worker']
    if args.vv:
        argv.append('--loglevel=DEBUG')
    elif args.v:
        argv.append('--loglevel=INFO')
    log_level = logg.getEffectiveLevel()
    log_level_name = logging.getLevelName(log_level)
    argv.append('--loglevel=' + log_level_name)
    argv.append('-Q')
    argv.append(args.q)
    argv.append(config.get('CELERY_QUEUE'))
    argv.append('-n')
    argv.append(args.q)

#    if config.true('SSL_ENABLE_CLIENT'):
#        Callback.ssl = True
#        Callback.ssl_cert_file = config.get('SSL_CERT_FILE')
#        Callback.ssl_key_file = config.get('SSL_KEY_FILE')
#        Callback.ssl_password = config.get('SSL_PASSWORD')
#
#    if config.get('SSL_CA_FILE') != '':
#        Callback.ssl_ca_file = config.get('SSL_CA_FILE')

    rpc = RPCConnection.connect(chain_spec, 'default')
    argv.append(config.get('CELERY_QUEUE'))

    BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
    BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol)
    default_token = ERC20Token(chain_spec, rpc, BaseTask.default_token_address)
    default_token.load(rpc)
    default_token = ERC20Token(chain_spec, conn, BaseTask.default_token_address)
    default_token.load(conn)
    BaseTask.default_token_decimals = default_token.decimals
    BaseTask.default_token_name = default_token.name

@@ -286,13 +216,13 @@ def main():
    logg.info('default token set to {} {}'.format(BaseTask.default_token_symbol, BaseTask.default_token_address))

    liveness.linux.set(rundir=config.get('CIC_RUN_DIR'))
    current_app.worker_main(argv)
    celery_app.worker_main(argv)
    liveness.linux.reset(rundir=config.get('CIC_RUN_DIR'))


@celery.signals.eventlet_pool_postshutdown.connect
def shutdown(sender=None, headers=None, body=None, **kwargs):
    logg.warning('in shudown event hook')
    logg.warning('in shutdown event hook')


if __name__ == '__main__':

@@ -8,14 +8,6 @@ import sys
import re

# external imports
import confini
import celery
import rlp
import cic_base.config
import cic_base.log
import cic_base.argparse
import cic_base.rpc
from cic_base.eth.syncer import chain_interface
from cic_eth_registry.error import UnknownContractError
from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS
@@ -30,8 +22,13 @@ from chainsyncer.backend.sql import SQLBackend
from chainsyncer.driver.head import HeadSyncer
from chainsyncer.driver.history import HistorySyncer
from chainsyncer.db.models.base import SessionBase
from chainlib.eth.address import (
    is_checksum_address,
    to_checksum_address,
)

# local imports
import cic_eth.cli
from cic_eth.db import dsn_from_config
from cic_eth.runnable.daemons.filters import (
    CallbackFilter,
@@ -47,61 +44,50 @@ from cic_eth.registry import (
    connect_token_registry,
)

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

script_dir = os.path.realpath(os.path.dirname(__file__))
arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_sync
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()

def add_block_args(argparser):
    argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync')
    argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
    return argparser
# process config
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)

# connect to celery
cic_eth.cli.CeleryApp.from_config(config)

logg = cic_base.log.create()
argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
args = cic_base.argparse.parse(argparser, logg)

config = cic_base.config.create(args.c, args, args.env_prefix)

config.add(args.y, '_KEYSTORE_FILE', True)
config.add(args.q, '_CELERY_QUEUE', True)
config.add(args.history_start, 'SYNCER_HISTORY_START', True)
config.add(args.no_history, '_NO_HISTORY', True)

cic_base.config.log(config)

# set up database
dsn = dsn_from_config(config)

SessionBase.connect(dsn, pool_size=16, debug=config.true('DATABASE_DEBUG'))

chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
# set up rpc
rpc = cic_eth.cli.RPC.from_config(config)
conn = rpc.get_default()

cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))

rpc = RPCConnection.connect(chain_spec, 'default')
# set up chain provisions
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
registry = None
try:
    registry = connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
    registry = connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
except UnknownContractError as e:
    logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
    sys.exit(1)
logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))


def main():
    # connect to celery
    celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))

    # Connect to blockchain with chainlib

    o = block_latest()
    r = rpc.do(o)
    r = conn.do(o)
    block_current = int(r, 16)
    block_offset = block_current + 1

    loop_interval = config.get('SYNCER_LOOP_INTERVAL')
    if loop_interval == None:
        stat = init_chain_stat(rpc, block_start=block_current)
        stat = init_chain_stat(conn, block_start=block_current)
        loop_interval = stat.block_average()

    logg.debug('current block height {}'.format(block_offset))
@@ -113,9 +99,9 @@ def main():
    syncer_backends = SQLBackend.resume(chain_spec, block_offset)

    if len(syncer_backends) == 0:
        initial_block_start = config.get('SYNCER_HISTORY_START')
        initial_block_start = config.get('SYNCER_OFFSET')
        initial_block_offset = block_offset
        if config.get('_NO_HISTORY'):
        if config.true('SYNCER_NO_HISTORY'):
            initial_block_start = block_offset
            initial_block_offset += 1
        syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
@@ -128,40 +114,45 @@ def main():

    for syncer_backend in syncer_backends:
        try:
            syncers.append(HistorySyncer(syncer_backend, chain_interface))
            syncers.append(HistorySyncer(syncer_backend, cic_eth.cli.chain_interface))
            logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend))
        except AttributeError:
            logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend))
            syncers.append(HeadSyncer(syncer_backend, chain_interface))
            syncers.append(HeadSyncer(syncer_backend, cic_eth.cli.chain_interface))

    connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
    connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))

    trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
    if trusted_addresses_src == None:
        logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
        sys.exit(1)
    trusted_addresses = trusted_addresses_src.split(',')
    for address in trusted_addresses:
    for i, address in enumerate(trusted_addresses):
        if not config.get('_UNSAFE'):
            if not is_checksum_address(address):
                raise ValueError('address {} is not a valid checksum address'.format(address))
        else:
            trusted_addresses[i] = to_checksum_address(address)
        logg.info('using trusted address {}'.format(address))
    connect_declarator(rpc, chain_spec, trusted_addresses)
    connect_token_registry(rpc, chain_spec)
    connect_declarator(conn, chain_spec, trusted_addresses)
    connect_token_registry(conn, chain_spec)
    CallbackFilter.trusted_addresses = trusted_addresses

    callback_filters = []
    for cb in config.get('TASKS_TRANSFER_CALLBACKS', '').split(','):
        task_split = cb.split(':')
        task_queue = config.get('_CELERY_QUEUE')
        task_queue = config.get('CELERY_QUEUE')
        if len(task_split) > 1:
            task_queue = task_split[0]
        callback_filter = CallbackFilter(chain_spec, task_split[1], task_queue)
        callback_filters.append(callback_filter)

    tx_filter = TxFilter(chain_spec, config.get('_CELERY_QUEUE'))
    tx_filter = TxFilter(chain_spec, config.get('CELERY_QUEUE'))

    account_registry_address = registry.by_name('AccountRegistry')
    registration_filter = RegistrationFilter(chain_spec, account_registry_address, queue=config.get('_CELERY_QUEUE'))
    registration_filter = RegistrationFilter(chain_spec, account_registry_address, queue=config.get('CELERY_QUEUE'))

    gas_filter = GasFilter(chain_spec, config.get('_CELERY_QUEUE'))
    gas_filter = GasFilter(chain_spec, config.get('CELERY_QUEUE'))

    #transfer_auth_filter = TransferAuthFilter(registry, chain_spec, config.get('_CELERY_QUEUE'))

@@ -176,7 +167,7 @@ def main():
    for cf in callback_filters:
        syncer.add_filter(cf)

    r = syncer.loop(int(loop_interval), rpc)
    r = syncer.loop(int(loop_interval), conn)
    sys.stderr.write("sync {} done at block {}\n".format(syncer, r))

    i += 1

@@ -12,50 +12,27 @@ import confini
import celery

# local imports
import cic_eth.cli
from cic_eth.api import Api
from cic_eth.api.admin import AdminApi

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

default_format = 'terminal'
default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')


argparser = argparse.ArgumentParser()
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
arg_flags = cic_eth.cli.argflag_std_base
local_arg_flags = cic_eth.cli.argflag_local_taskcallback
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()

if args.v == True:
    logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
    logging.getLogger().setLevel(logging.DEBUG)
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)

config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
config.dict_override(args_override, 'cli args')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
celery_app = cic_eth.cli.CeleryApp.from_config(config)


celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))

queue = args.q

api = Api(config.get('CIC_CHAIN_SPEC'), queue=queue)
api = Api(config.get('CHAIN_SPEC'), queue=config.get('CELERY_QUEUE'))
admin_api = AdminApi(None)


def main():
    t = admin_api.registry()
    registry_address = t.get()
Some files were not shown because too many files have changed in this diff