Compare commits
382 Commits
lash/sover
...
lash/task-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
31268c0b47
|
||
| 8a9d2ee0be | |||
| 3608fd1fc7 | |||
| 0d275f358b | |||
|
3aef2aa65f
|
|||
| 5644baefb2 | |||
|
1a7c4deab6
|
|||
| 0389d8623d | |||
|
cf64387d81
|
|||
| 79bcc8a9f1 | |||
| 7b57f1b4c2 | |||
| 76b8519637 | |||
| e89aec76fa | |||
| a138a0ec75 | |||
| 5128c7828c | |||
| 2f005195e5 | |||
| fb8db3ffd2 | |||
| b5f647c4aa | |||
| 6019143ba1 | |||
| 610440b722 | |||
|
|
d65455fc29 | ||
| 43f8d1c30c | |||
| b855211eed | |||
| 1e0c475f39 | |||
|
|
3e6cf594e3 | ||
| b8f79a2dd1 | |||
| 540c2fd950 | |||
| b9b06eced8 | |||
| 949bb29379 | |||
|
|
0468906601 | ||
|
|
471243488e
|
||
| 3c4acd82ff | |||
| e07f992c5a | |||
| 17e95cb19c | |||
| 3c3a97ce15 | |||
| a492be4927 | |||
|
|
1f555748b0 | ||
| 8aa4d20eea | |||
|
|
90cf24dcee | ||
|
|
75b711dbd5 | ||
| c21c1eb2ef | |||
| eb5e612105 | |||
| e017d11770 | |||
| e327af68e1 | |||
| 92cc6a3f27 | |||
| f42bf7754a | |||
|
|
7342927e91
|
||
| 17333af88f | |||
| 6a68d2ed32 | |||
|
|
ef77f4c99a | ||
|
|
56dbe8a502 | ||
|
|
2dc8ac6a12 | ||
|
|
0ced68e224 | ||
| 2afb20e715 | |||
| 3b0113d0e4 | |||
|
|
ebf4743a84 | ||
|
|
3bf92e7a8a | ||
| f0b4c42c68 | |||
|
|
b62d00180c | ||
|
|
a49978cc36 | ||
| 1b0ee269d0 | |||
| aa2f363b27 | |||
| 2a24ce6938 | |||
| 938a10b5c3 | |||
|
|
76e33e578b | ||
|
|
2ec4262734 | ||
|
|
7684fe3883 | ||
|
|
995a148c6a | ||
|
|
511e099689 | ||
|
|
f877218c55 | ||
| 8ac9a1e99a | |||
| c4cb095a29 | |||
| 05b8bbbbca | |||
| 1ce32fbbe0 | |||
| 3fd5e77e2c | |||
| e27a49ef33 | |||
|
|
fffb2bc3f4 | ||
|
|
8910fb0759 | ||
|
|
c84239c820 | ||
|
|
452047b900 | ||
|
|
b8be457c41 | ||
|
|
0ec9813e5f | ||
|
|
defa7797dc | ||
|
|
bb3d38a1f9 | ||
| 3be1c1b33d | |||
|
|
d6c763f2d7 | ||
| b7942ddcfa | |||
|
8de5dc1540
|
|||
| fad0a4b580 | |||
| 0672a17d2e | |||
| f764b73f66 | |||
| 806b82504f | |||
| ac76e14129 | |||
| 1c78f4d6d6 | |||
| 0d6e228f8a | |||
| 7a3cb7ab75 | |||
| 992c7b4022 | |||
| f19173001e | |||
|
|
f82bb4515d | ||
| 24e6db7d87 | |||
| ecdfb9bc5a | |||
| 30415ac997 | |||
| d5a8b77349 | |||
|
|
ed2521b582 | ||
|
|
395930106a | ||
|
|
ee1452e530 | ||
|
|
8cdaf9f28a
|
||
|
|
402b968b6d | ||
|
|
aa13517534 | ||
|
|
884b18f2f1 | ||
|
|
494a8f3e88 | ||
|
|
1214f605a7 | ||
|
|
0783a6001c | ||
|
|
f9594b766a | ||
|
|
561ae62d5e | ||
|
|
d6782abbcc | ||
|
|
8f173fa30b | ||
|
|
3741cb3283 | ||
|
|
54dd5acb62 | ||
| c52885a016 | |||
| f0dd257e05 | |||
| e8c870d230 | |||
| 4bb36a448d | |||
| 231163e2fc | |||
| e599933ef8 | |||
| 266bc3362d | |||
| 8350381754 | |||
| 6ddeacf036 | |||
| fe017d2b0f | |||
| d7973436e6 | |||
| 5025c31af6 | |||
|
|
021c736707 | ||
|
|
e3028a5060 | ||
|
|
5f6c57647f | ||
|
|
ed029a936c | ||
|
|
c559bb2fee | ||
|
|
9b79034ed3 | ||
| b7d5c6799f | |||
| eef8bb2cf7 | |||
| cf96fee430 | |||
| 9740963431 | |||
|
|
a3c4932488 | ||
|
|
aa667951be | ||
|
|
c2459cfd65 | ||
|
|
e7102ff02d | ||
| a942c785f6 | |||
| 70704b09ec | |||
|
|
a075c55957 | ||
|
|
6464f651ec | ||
|
|
5145282946 | ||
|
|
1e87f2ed31
|
||
|
|
c852f41d76 | ||
|
|
f8e68cff96 | ||
| 7027d77836 | |||
| d356f8167d | |||
| 753d21fe95 | |||
| 3b6e031746 | |||
| b1d5d45eef | |||
| 53317cb912 | |||
| 18382a1f35 | |||
| 29e91fafab | |||
| 5b20a9a24a | |||
| a252195bdc | |||
|
|
f1be3b633c | ||
|
|
e59a71188c
|
||
| 1d0eb06f2f | |||
| 57127132b5 | |||
| 0bf2c35fcd | |||
| d046595764 | |||
|
9dd7ec88fd
|
|||
| 282fd2ff52 | |||
| 8f85598861 | |||
| 8529c349ca | |||
| 4368d2bf59 | |||
| da3c812bf5 | |||
| 82b1e87462 | |||
| e13c423daf | |||
| 56b3bd751d | |||
| 4f41c5bacf | |||
| 07583f0c3b | |||
| 0ae912082c | |||
| 094f4d4298 | |||
|
|
9471b1d8ab | ||
|
|
57100366d8 | ||
| 71e0973020 | |||
|
|
12ab5c2f66 | ||
| a804552620 | |||
| 0319fa6076 | |||
| 91dfc51d54 | |||
| 4fd861f080 | |||
|
|
28de7a4eac | ||
|
|
a31e79b0f7 | ||
|
eb2f71aee0
|
|||
|
|
e5b1352970 | ||
|
|
89b90da5d2 | ||
| 9607994c31 | |||
| 0da617d29e | |||
| 56bcad16a5 | |||
| 77d9936e39 | |||
|
|
72aeefc78b | ||
|
|
fab9b0c520
|
||
| 9566f8c8e2 | |||
|
007d7a5121
|
|||
| fc20849aff | |||
| 1605e53216 | |||
| 200fdf0e3c | |||
| 022db04198 | |||
| 1c17048981 | |||
|
|
04c0963f33 | ||
|
|
096ed9bc27 | ||
| 1a931eced4 | |||
| ed9e032890 | |||
|
|
69ae9b7c07 | ||
|
|
634d3fb401 | ||
|
|
65f722b291 | ||
|
|
0ad0f9981c | ||
|
|
5fb0f4a2e9 | ||
|
|
41a96b5584
|
||
|
|
d0f2bc0120 | ||
|
|
e2946052e0 | ||
|
|
546d69f1e9 | ||
|
|
fbf7351238 | ||
|
|
b886384fa8 | ||
|
|
277033f3b5 | ||
| 4ae094fd30 | |||
| cb239f112a | |||
|
|
d971a6eded | ||
|
|
b0a6df0177 | ||
|
|
92c9df4e19 | ||
|
|
9c49d568e0 | ||
|
|
d7113f3923 | ||
|
|
c569fe4b17 | ||
| 1c650df27d | |||
| a31b7bc9cd | |||
|
|
78ff58c1a2 | ||
| 1676addbeb | |||
| 1efc25ac15 | |||
|
|
db2ec0dcfa | ||
| 5148e6428b | |||
|
|
0c186ed968 | ||
|
|
c44439bd90 | ||
|
|
0411603078 | ||
| eee895ea71 | |||
|
|
a5ca898532 | ||
|
|
6d8508aebf | ||
|
|
f8f66984d2 | ||
|
|
0f02dd1b7c | ||
| 63a4a82ab0 | |||
|
949c1070a9
|
|||
| 5d9fbe9b64 | |||
| 873a3f082a | |||
| 7b408cf564 | |||
|
|
9dfbd7034c | ||
|
|
235f5cede8 | ||
|
|
0a59539f9a | ||
|
|
60b36945df | ||
| dae6526677 | |||
|
1e94a516c2
|
|||
| e8512ebbae | |||
| f2c955c60b | |||
| 17b3b27d81 | |||
| 1cb172b8bf | |||
|
|
9d47e4c764 | ||
|
|
c68cc318ab | ||
|
|
af99ac823a | ||
|
|
06652eb30f | ||
|
|
f66f913307 | ||
|
|
8bf1364864
|
||
| 0d6d7179eb | |||
|
e7f48f3ce0
|
|||
|
|
b252fab018 | ||
|
|
4667916d80
|
||
| 1f668384cc | |||
| 123dc55687 | |||
|
|
0b4d8d5937
|
||
|
|
ed6bef4052 | ||
|
|
6a8a356f09 | ||
| 5ec0b67496 | |||
| 7d935bcbc3 | |||
| fd69a3c6bb | |||
|
|
298bcf89e5 | ||
|
|
5d3d773f41 | ||
|
|
e71b2411d0 | ||
|
|
b4bfb76634 | ||
| aab5c8bf85 | |||
| e1564574f7 | |||
| 13253a2dcc | |||
| 9020fe1000 | |||
| a2e7d2973c | |||
| 82f650e81d | |||
| e77940d0de | |||
| 1df62717ef | |||
| c4919d56b1 | |||
| 6d44863a49 | |||
|
|
b02cdee1bd | ||
|
|
75bf8f15be | ||
| 8db76dc0a8 | |||
| a3261f2f0e | |||
| 850dd15451 | |||
| 0c56e84704 | |||
| 63cd8a4aab | |||
|
|
2c326f62ae | ||
| 9ed62c58ae | |||
|
|
04e9f45feb
|
||
|
|
9126a75c4a | ||
|
|
1bc29588a1 | ||
| e6d57d3bbb | |||
| f64ff1290c | |||
|
|
d5cbe9d113 | ||
|
|
5663741ed4 | ||
|
|
0f6615a925 | ||
|
|
aa15353d68 | ||
|
|
f7a69830ba | ||
|
|
7428420cda | ||
|
|
7504a899a1 | ||
|
|
c20c5af27c | ||
|
|
32b72274f5 | ||
|
|
f50da54274 | ||
|
|
dd94b8a190 | ||
|
|
16dd210965 | ||
|
|
cd0e702e3a | ||
|
|
cfab16f4a9 | ||
|
|
60fdb06034 | ||
|
|
3129a78e06 | ||
|
|
6b6ec8659b | ||
|
|
96e755b54d
|
||
|
|
f38458ff4c | ||
|
|
660d524401 | ||
|
|
1bc7cde1f0 | ||
|
|
9c22ffca38 | ||
|
|
39fe4a14ec | ||
|
|
65250196cc
|
||
|
|
0123ce13ea | ||
|
|
03b3e8cd3f | ||
|
|
3ee84f780e | ||
|
|
95269f69ed | ||
| 621780e9b6 | |||
| eecdca1a55 | |||
| 6fef0ecec9 | |||
|
|
6b89a2da89 | ||
|
|
254f2a266b | ||
| ba18914498 | |||
| f410e8b7e3 | |||
| 01454c9ac0 | |||
| 462d7046ed | |||
| f91b491251 | |||
| 0de79521dc | |||
|
|
22ec8e2e0e
|
||
|
|
a8529ae2ef | ||
|
|
98ddf56a1d | ||
| bee602b16a | |||
| c67274846f | |||
|
|
48570b2338 | ||
|
|
c80b8771b9 | ||
|
|
6c6db7bc7b | ||
|
|
bb941acd7e
|
||
|
|
7dee7de26e | ||
|
|
7b16a36a62 | ||
|
|
5a4e0b8eba | ||
|
|
226699568f | ||
|
|
ec2b0e56e5 | ||
|
|
6ffaca5207
|
||
|
|
5c6375c9ec | ||
|
|
99f55f01ed | ||
|
|
086308fdb8 | ||
|
|
f8f74a17f6
|
||
| fd629cdc51 | |||
| e9fb80ab78 | |||
|
|
7728f38f14 | ||
|
|
a305aafc86 | ||
|
|
9e6bb2acb2 | ||
|
|
a7ab2e3f3f | ||
|
|
1f2fc3e952 | ||
|
|
a9258c3085 | ||
|
|
1a97f1e97d | ||
|
|
fc59e24c80 | ||
|
|
68bdadcdf1 | ||
|
|
810f9fe994 | ||
|
|
7762020186 | ||
|
|
462933d8ae | ||
|
|
3f8d7fc10a | ||
|
|
426e46c791 |
15
.gitignore
vendored
15
.gitignore
vendored
@@ -1,2 +1,17 @@
|
|||||||
service-configs/*
|
service-configs/*
|
||||||
!service-configs/.gitkeep
|
!service-configs/.gitkeep
|
||||||
|
**/node_modules/
|
||||||
|
__pycache__
|
||||||
|
*.pyc
|
||||||
|
*.o
|
||||||
|
gmon.out
|
||||||
|
*.egg-info
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
**/*sqlite
|
||||||
|
**/.nyc_output
|
||||||
|
**/coverage
|
||||||
|
**/.venv
|
||||||
|
.idea
|
||||||
|
**/.vim
|
||||||
|
**/*secret.yaml
|
||||||
|
|||||||
@@ -1,13 +1,43 @@
|
|||||||
include:
|
include:
|
||||||
- local: 'ci_templates/.cic-template.yml'
|
#- local: 'ci_templates/.cic-template.yml' #kaniko build templates
|
||||||
- local: 'apps/contract-migration/.gitlab-ci.yml'
|
# these includes are app specific unit tests
|
||||||
- local: 'apps/cic-eth/.gitlab-ci.yml'
|
- local: 'apps/cic-eth/.gitlab-ci.yml'
|
||||||
- local: 'apps/cic-ussd/.gitlab-ci.yml'
|
- local: 'apps/cic-ussd/.gitlab-ci.yml'
|
||||||
- local: 'apps/cic-notify/.gitlab-ci.yml'
|
- local: 'apps/cic-notify/.gitlab-ci.yml'
|
||||||
- local: 'apps/cic-meta/.gitlab-ci.yml'
|
- local: 'apps/cic-meta/.gitlab-ci.yml'
|
||||||
- local: 'apps/cic-cache/.gitlab-ci.yml'
|
- local: 'apps/cic-cache/.gitlab-ci.yml'
|
||||||
|
#- local: 'apps/contract-migration/.gitlab-ci.yml'
|
||||||
|
#- local: 'apps/data-seeding/.gitlab-ci.yml'
|
||||||
|
|
||||||
stages:
|
stages:
|
||||||
- build
|
- build
|
||||||
- test
|
- test
|
||||||
- release
|
- deploy
|
||||||
|
|
||||||
|
image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/docker-with-compose:latest
|
||||||
|
|
||||||
|
variables:
|
||||||
|
DOCKER_BUILDKIT: "1"
|
||||||
|
COMPOSE_DOCKER_CLI_BUILD: "1"
|
||||||
|
CI_DEBUG_TRACE: "true"
|
||||||
|
|
||||||
|
before_script:
|
||||||
|
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
|
||||||
|
|
||||||
|
# runs on protected branches and pushes to repo
|
||||||
|
build-push:
|
||||||
|
stage: build
|
||||||
|
tags:
|
||||||
|
- integration
|
||||||
|
#script:
|
||||||
|
# - TAG=$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA sh ./scripts/build-push.sh
|
||||||
|
script:
|
||||||
|
- TAG=latest sh ./scripts/build-push.sh
|
||||||
|
rules:
|
||||||
|
- if: $CI_COMMIT_REF_PROTECTED == "true"
|
||||||
|
when: always
|
||||||
|
|
||||||
|
deploy-dev:
|
||||||
|
stage: deploy
|
||||||
|
trigger: grassrootseconomics/devops
|
||||||
|
when: manual
|
||||||
|
|||||||
32
README.md
32
README.md
@@ -2,25 +2,21 @@
|
|||||||
|
|
||||||
## Getting started
|
## Getting started
|
||||||
|
|
||||||
## Make some keys
|
This repo uses docker-compose and docker buildkit. Set the following environment variables to get started:
|
||||||
|
|
||||||
```
|
```
|
||||||
docker build -t bloxie . && docker run -v "$(pwd)/keys:/root/keys" --rm -it -t bloxie account new --chain /root/bloxberg.json --keys-path /root/keys
|
export COMPOSE_DOCKER_CLI_BUILD=1
|
||||||
|
export DOCKER_BUILDKIT=1
|
||||||
```
|
```
|
||||||
|
|
||||||
|
start services, database, redis and local ethereum node
|
||||||
### Prepare the repo
|
|
||||||
|
|
||||||
This is stuff we need to put in makefile but for now...
|
|
||||||
|
|
||||||
File mounts and permisssions need to be set
|
|
||||||
```
|
```
|
||||||
chmod -R 755 scripts/initdb apps/cic-meta/scripts/initdb
|
docker-compose up -d
|
||||||
````
|
|
||||||
|
|
||||||
start cluster
|
|
||||||
```
|
```
|
||||||
docker-compose up
|
|
||||||
|
Run app/contract-migration to deploy contracts
|
||||||
|
```
|
||||||
|
RUN_MASK=3 docker-compose up contract-migration
|
||||||
```
|
```
|
||||||
|
|
||||||
stop cluster
|
stop cluster
|
||||||
@@ -28,9 +24,9 @@ stop cluster
|
|||||||
docker-compose down
|
docker-compose down
|
||||||
```
|
```
|
||||||
|
|
||||||
delete data
|
stop cluster and delete data
|
||||||
```
|
```
|
||||||
docker-compose down -v
|
docker-compose down -v --remove-orphans
|
||||||
```
|
```
|
||||||
|
|
||||||
rebuild an images
|
rebuild an images
|
||||||
@@ -38,5 +34,7 @@ rebuild an images
|
|||||||
docker-compose up --build <service_name>
|
docker-compose up --build <service_name>
|
||||||
```
|
```
|
||||||
|
|
||||||
Deployment variables are writtend to service-configs/.env after everthing is up.
|
to delete the buildkit cache
|
||||||
|
```
|
||||||
|
docker builder prune --filter type=exec.cachemount
|
||||||
|
```
|
||||||
|
|||||||
@@ -1,34 +0,0 @@
|
|||||||
# The solc image messes up the alpine environment, so we have to go all over again
|
|
||||||
FROM python:3.8.6-slim-buster
|
|
||||||
|
|
||||||
LABEL authors="Louis Holbrook <dev@holbrook.no> 0826EDA1702D1E87C6E2875121D2E7BB88C2A746"
|
|
||||||
LABEL spdx-license-identifier="GPL-3.0-or-later"
|
|
||||||
LABEL description="Base layer for buiding development images for the cic component suite"
|
|
||||||
|
|
||||||
RUN apt-get update && \
|
|
||||||
apt-get install -y git gcc g++ libpq-dev && \
|
|
||||||
apt-get install -y vim gawk jq telnet openssl iputils-ping curl wget gnupg socat bash procps make python2 postgresql-client
|
|
||||||
|
|
||||||
|
|
||||||
RUN echo installing nodejs tooling
|
|
||||||
|
|
||||||
COPY ./dev/nvm.sh /root/
|
|
||||||
|
|
||||||
# Install nvm with node and npm
|
|
||||||
# https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
|
|
||||||
ENV NVM_DIR /root/.nvm
|
|
||||||
ENV NODE_VERSION 15.3.0
|
|
||||||
ENV BANCOR_NODE_VERSION 10.16.0
|
|
||||||
|
|
||||||
RUN wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash \
|
|
||||||
&& . $NVM_DIR/nvm.sh \
|
|
||||||
&& nvm install $NODE_VERSION \
|
|
||||||
&& nvm alias default $NODE_VERSION \
|
|
||||||
&& nvm use $NODE_VERSION \
|
|
||||||
# So many ridiculously stupid issues with node in docker that take oceans of absolutely wasted time to resolve
|
|
||||||
# owner of these files is "1001" by default - wtf
|
|
||||||
&& chown -R root:root "$NVM_DIR/versions/node/v$NODE_VERSION"
|
|
||||||
|
|
||||||
ENV NODE_PATH $NVM_DIR/versions/node//v$NODE_VERSION/lib/node_modules
|
|
||||||
ENV PATH $NVM_DIR/versions/node//v$NODE_VERSION/bin:$PATH
|
|
||||||
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
## this is an example base image if we wanted one for all the other apps. Its just OS level things
|
|
||||||
@@ -6,3 +6,4 @@ HOST=localhost
|
|||||||
PORT=5432
|
PORT=5432
|
||||||
ENGINE=postgresql
|
ENGINE=postgresql
|
||||||
DRIVER=psycopg2
|
DRIVER=psycopg2
|
||||||
|
DEBUG=
|
||||||
|
|||||||
@@ -6,3 +6,4 @@ HOST=localhost
|
|||||||
PORT=5432
|
PORT=5432
|
||||||
ENGINE=sqlite
|
ENGINE=sqlite
|
||||||
DRIVER=pysqlite
|
DRIVER=pysqlite
|
||||||
|
DEBUG=
|
||||||
|
|||||||
@@ -2,4 +2,6 @@
|
|||||||
omit =
|
omit =
|
||||||
.venv/*
|
.venv/*
|
||||||
scripts/*
|
scripts/*
|
||||||
cic_cache/db/postgres/*
|
cic_cache/db/migrations/*
|
||||||
|
cic_cache/version.py
|
||||||
|
cic_cache/cli
|
||||||
|
|||||||
4
apps/cic-cache/.dockerignore
Normal file
4
apps/cic-cache/.dockerignore
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
.git
|
||||||
|
.cache
|
||||||
|
.dot
|
||||||
|
**/doc
|
||||||
@@ -1,22 +1,17 @@
|
|||||||
.cic_cache_variables:
|
build-test-cic-cache:
|
||||||
variables:
|
stage: test
|
||||||
APP_NAME: cic-cache
|
tags:
|
||||||
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
|
- integration
|
||||||
|
variables:
|
||||||
.cic_cache_changes_target:
|
APP_NAME: cic-cache
|
||||||
rules:
|
MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
|
||||||
- changes:
|
script:
|
||||||
- $CONTEXT/$APP_NAME/*
|
- cd apps/cic-cache
|
||||||
|
- docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
|
||||||
build-mr-cic-cache:
|
- docker run $MR_IMAGE_TAG sh docker/run_tests.sh
|
||||||
extends:
|
allow_failure: true
|
||||||
- .cic_cache_changes_target
|
rules:
|
||||||
- .py_build_merge_request
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
- .cic_cache_variables
|
changes:
|
||||||
|
- apps/$APP_NAME/**/*
|
||||||
build-push-cic-cache:
|
when: always
|
||||||
extends:
|
|
||||||
- .py_build_push
|
|
||||||
- .cic_cache_variables
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
1
apps/cic-cache/MANIFEST.in
Normal file
1
apps/cic-cache/MANIFEST.in
Normal file
@@ -0,0 +1 @@
|
|||||||
|
include *requirements.txt cic_cache/data/config/*
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
# CIC-CACHE
|
||||||
|
|||||||
@@ -55,15 +55,37 @@ class Api:
|
|||||||
queue=callback_queue,
|
queue=callback_queue,
|
||||||
)
|
)
|
||||||
|
|
||||||
def list(self, offset, limit, address=None):
|
def list(self, offset=0, limit=100, address=None, oldest=False):
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_cache.tasks.tx.tx_filter',
|
'cic_cache.tasks.tx.tx_filter',
|
||||||
[
|
[
|
||||||
0,
|
offset,
|
||||||
100,
|
limit,
|
||||||
address,
|
address,
|
||||||
|
oldest,
|
||||||
],
|
],
|
||||||
queue=None
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
if self.callback_param != None:
|
||||||
|
s.link(self.callback_success).on_error(self.callback_error)
|
||||||
|
|
||||||
|
t = s.apply_async()
|
||||||
|
|
||||||
|
return t
|
||||||
|
|
||||||
|
|
||||||
|
def list_content(self, offset=0, limit=100, address=None, block_offset=None, block_limit=None, oldest=False):
|
||||||
|
s = celery.signature(
|
||||||
|
'cic_cache.tasks.tx.tx_filter_content',
|
||||||
|
[
|
||||||
|
offset,
|
||||||
|
limit,
|
||||||
|
address,
|
||||||
|
block_offset,
|
||||||
|
block_limit,
|
||||||
|
oldest,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
if self.callback_param != None:
|
if self.callback_param != None:
|
||||||
s.link(self.callback_success).on_error(self.callback_error)
|
s.link(self.callback_success).on_error(self.callback_error)
|
||||||
|
|||||||
@@ -1,30 +1,42 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
import logging
|
import logging
|
||||||
|
import datetime
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import moolb
|
import moolb
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_cache.db import list_transactions_mined
|
from cic_cache.db.list import (
|
||||||
from cic_cache.db import list_transactions_account_mined
|
list_transactions_mined,
|
||||||
|
list_transactions_account_mined,
|
||||||
|
list_transactions_mined_with_data,
|
||||||
|
list_transactions_mined_with_data_index,
|
||||||
|
list_transactions_account_mined_with_data_index,
|
||||||
|
list_transactions_account_mined_with_data,
|
||||||
|
)
|
||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
class BloomCache:
|
DEFAULT_FILTER_SIZE = 8192 * 8
|
||||||
|
DEFAULT_LIMIT = 100
|
||||||
|
|
||||||
|
class Cache:
|
||||||
|
|
||||||
def __init__(self, session):
|
def __init__(self, session):
|
||||||
self.session = session
|
self.session = session
|
||||||
|
|
||||||
|
|
||||||
|
class BloomCache(Cache):
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def __get_filter_size(n):
|
def __get_filter_size(n):
|
||||||
n = 8192 * 8
|
n = DEFAULT_FILTER_SIZE
|
||||||
logg.warning('filter size hardcoded to {}'.format(n))
|
logg.warning('filter size hardcoded to {}'.format(n))
|
||||||
return n
|
return n
|
||||||
|
|
||||||
|
|
||||||
def load_transactions(self, offset, limit):
|
def load_transactions(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
|
||||||
"""Retrieves a list of transactions from cache and creates a bloom filter pointing to blocks and transactions.
|
"""Retrieves a list of transactions from cache and creates a bloom filter pointing to blocks and transactions.
|
||||||
|
|
||||||
Block and transaction numbers are serialized as 32-bit big-endian numbers. The input to the second bloom filter is the concatenation of the serialized block number and transaction index.
|
Block and transaction numbers are serialized as 32-bit big-endian numbers. The input to the second bloom filter is the concatenation of the serialized block number and transaction index.
|
||||||
@@ -41,7 +53,7 @@ class BloomCache:
|
|||||||
:return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
|
:return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
|
||||||
:rtype: tuple
|
:rtype: tuple
|
||||||
"""
|
"""
|
||||||
rows = list_transactions_mined(self.session, offset, limit)
|
rows = list_transactions_mined(self.session, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
|
||||||
|
|
||||||
f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
||||||
f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
||||||
@@ -50,7 +62,12 @@ class BloomCache:
|
|||||||
for r in rows:
|
for r in rows:
|
||||||
if highest_block == -1:
|
if highest_block == -1:
|
||||||
highest_block = r[0]
|
highest_block = r[0]
|
||||||
lowest_block = r[0]
|
lowest_block = r[0]
|
||||||
|
else:
|
||||||
|
if oldest:
|
||||||
|
highest_block = r[0]
|
||||||
|
else:
|
||||||
|
lowest_block = r[0]
|
||||||
block = r[0].to_bytes(4, byteorder='big')
|
block = r[0].to_bytes(4, byteorder='big')
|
||||||
tx = r[1].to_bytes(4, byteorder='big')
|
tx = r[1].to_bytes(4, byteorder='big')
|
||||||
f_block.add(block)
|
f_block.add(block)
|
||||||
@@ -59,7 +76,7 @@ class BloomCache:
|
|||||||
return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
|
return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
|
||||||
|
|
||||||
|
|
||||||
def load_transactions_account(self, address, offset, limit):
|
def load_transactions_account(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
|
||||||
"""Same as load_transactions(...), but only retrieves transactions where the specified account address is sender or recipient.
|
"""Same as load_transactions(...), but only retrieves transactions where the specified account address is sender or recipient.
|
||||||
|
|
||||||
:param address: Address to retrieve transactions for.
|
:param address: Address to retrieve transactions for.
|
||||||
@@ -71,7 +88,7 @@ class BloomCache:
|
|||||||
:return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
|
:return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
|
||||||
:rtype: tuple
|
:rtype: tuple
|
||||||
"""
|
"""
|
||||||
rows = list_transactions_account_mined(self.session, address, offset, limit)
|
rows = list_transactions_account_mined(self.session, address, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
|
||||||
|
|
||||||
f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
||||||
f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
||||||
@@ -80,10 +97,74 @@ class BloomCache:
|
|||||||
for r in rows:
|
for r in rows:
|
||||||
if highest_block == -1:
|
if highest_block == -1:
|
||||||
highest_block = r[0]
|
highest_block = r[0]
|
||||||
lowest_block = r[0]
|
lowest_block = r[0]
|
||||||
|
else:
|
||||||
|
if oldest:
|
||||||
|
highest_block = r[0]
|
||||||
|
else:
|
||||||
|
lowest_block = r[0]
|
||||||
block = r[0].to_bytes(4, byteorder='big')
|
block = r[0].to_bytes(4, byteorder='big')
|
||||||
tx = r[1].to_bytes(4, byteorder='big')
|
tx = r[1].to_bytes(4, byteorder='big')
|
||||||
f_block.add(block)
|
f_block.add(block)
|
||||||
f_blocktx.add(block + tx)
|
f_blocktx.add(block + tx)
|
||||||
logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
|
logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
|
||||||
return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
|
return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
|
||||||
|
|
||||||
|
|
||||||
|
class DataCache(Cache):
|
||||||
|
|
||||||
|
def load_transactions_with_data(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
|
||||||
|
if limit == 0:
|
||||||
|
limit = DEFAULT_LIMIT
|
||||||
|
rows = list_transactions_mined_with_data(self.session, offset, limit, block_offset, block_limit, oldest=oldest)
|
||||||
|
return self.__process_rows(rows, oldest)
|
||||||
|
|
||||||
|
|
||||||
|
def load_transactions_account_with_data(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
|
||||||
|
if limit == 0:
|
||||||
|
limit = DEFAULT_LIMIT
|
||||||
|
rows = list_transactions_account_mined_with_data(self.session, address, offset, limit, block_offset, block_limit, oldest=oldest)
|
||||||
|
return self.__process_rows(rows, oldest)
|
||||||
|
|
||||||
|
|
||||||
|
def __process_rows(self, rows, oldest):
|
||||||
|
tx_cache = []
|
||||||
|
highest_block = -1;
|
||||||
|
lowest_block = -1;
|
||||||
|
date_is_str = None # stick this in startup
|
||||||
|
for r in rows:
|
||||||
|
if highest_block == -1:
|
||||||
|
highest_block = r['block_number']
|
||||||
|
lowest_block = r['block_number']
|
||||||
|
else:
|
||||||
|
if oldest:
|
||||||
|
highest_block = r['block_number']
|
||||||
|
else:
|
||||||
|
lowest_block = r['block_number']
|
||||||
|
tx_type = 'unknown'
|
||||||
|
|
||||||
|
if r['value'] != None:
|
||||||
|
tx_type = '{}.{}'.format(r['domain'], r['value'])
|
||||||
|
|
||||||
|
if date_is_str == None:
|
||||||
|
date_is_str = type(r['date_block']).__name__ == 'str'
|
||||||
|
|
||||||
|
o = {
|
||||||
|
'block_number': r['block_number'],
|
||||||
|
'tx_hash': r['tx_hash'],
|
||||||
|
'date_block': r['date_block'],
|
||||||
|
'sender': r['sender'],
|
||||||
|
'recipient': r['recipient'],
|
||||||
|
'from_value': int(r['from_value']),
|
||||||
|
'to_value': int(r['to_value']),
|
||||||
|
'source_token': r['source_token'],
|
||||||
|
'destination_token': r['destination_token'],
|
||||||
|
'success': r['success'],
|
||||||
|
'tx_type': tx_type,
|
||||||
|
}
|
||||||
|
|
||||||
|
if date_is_str:
|
||||||
|
o['date_block'] = datetime.datetime.fromisoformat(r['date_block'])
|
||||||
|
|
||||||
|
tx_cache.append(o)
|
||||||
|
return (lowest_block, highest_block, tx_cache)
|
||||||
|
|||||||
15
apps/cic-cache/cic_cache/cli/__init__.py
Normal file
15
apps/cic-cache/cic_cache/cli/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
# local imports
|
||||||
|
from .base import *
|
||||||
|
from .chain import (
|
||||||
|
EthChainInterface,
|
||||||
|
chain_interface,
|
||||||
|
)
|
||||||
|
from .rpc import RPC
|
||||||
|
from .arg import ArgumentParser
|
||||||
|
from .config import Config
|
||||||
|
from .celery import CeleryApp
|
||||||
|
from .registry import (
|
||||||
|
connect_registry,
|
||||||
|
connect_token_registry,
|
||||||
|
connect_declarator,
|
||||||
|
)
|
||||||
20
apps/cic-cache/cic_cache/cli/arg.py
Normal file
20
apps/cic-cache/cic_cache/cli/arg.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# external imports
|
||||||
|
from chainlib.eth.cli import ArgumentParser as BaseArgumentParser
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .base import (
|
||||||
|
CICFlag,
|
||||||
|
Flag,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ArgumentParser(BaseArgumentParser):
|
||||||
|
|
||||||
|
def process_local_flags(self, local_arg_flags):
|
||||||
|
if local_arg_flags & CICFlag.CELERY:
|
||||||
|
self.add_argument('-q', '--celery-queue', dest='celery_queue', type=str, default='cic-cache', help='Task queue')
|
||||||
|
if local_arg_flags & CICFlag.SYNCER:
|
||||||
|
self.add_argument('--offset', type=int, default=0, help='Start block height for initial history sync')
|
||||||
|
self.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
|
||||||
|
if local_arg_flags & CICFlag.CHAIN:
|
||||||
|
self.add_argument('-r', '--registry-address', type=str, dest='registry_address', help='CIC registry contract address')
|
||||||
31
apps/cic-cache/cic_cache/cli/base.py
Normal file
31
apps/cic-cache/cic_cache/cli/base.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# standard imports
|
||||||
|
import enum
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
from chainlib.eth.cli import (
|
||||||
|
argflag_std_read,
|
||||||
|
argflag_std_write,
|
||||||
|
argflag_std_base,
|
||||||
|
Flag,
|
||||||
|
)
|
||||||
|
|
||||||
|
class CICFlag(enum.IntEnum):
|
||||||
|
|
||||||
|
# celery - nibble 1
|
||||||
|
CELERY = 1
|
||||||
|
|
||||||
|
# redis - nibble 2
|
||||||
|
# REDIS = 16
|
||||||
|
# REDIS_CALLBACK = 32
|
||||||
|
|
||||||
|
# chain - nibble 3
|
||||||
|
CHAIN = 256
|
||||||
|
|
||||||
|
# sync - nibble 4
|
||||||
|
SYNCER = 4096
|
||||||
|
|
||||||
|
|
||||||
|
argflag_local_task = CICFlag.CELERY
|
||||||
|
#argflag_local_taskcallback = argflag_local_task | CICFlag.REDIS | CICFlag.REDIS_CALLBACK
|
||||||
|
argflag_local_chain = CICFlag.CHAIN
|
||||||
|
argflag_local_sync = CICFlag.SYNCER | CICFlag.CHAIN
|
||||||
24
apps/cic-cache/cic_cache/cli/celery.py
Normal file
24
apps/cic-cache/cic_cache/cli/celery.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class CeleryApp:
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_config(cls, config):
|
||||||
|
backend_url = config.get('CELERY_RESULT_URL')
|
||||||
|
broker_url = config.get('CELERY_BROKER_URL')
|
||||||
|
celery_app = None
|
||||||
|
if backend_url != None:
|
||||||
|
celery_app = celery.Celery(broker=broker_url, backend=backend_url)
|
||||||
|
logg.info('creating celery app on {} with backend on {}'.format(broker_url, backend_url))
|
||||||
|
else:
|
||||||
|
celery_app = celery.Celery(broker=broker_url)
|
||||||
|
logg.info('creating celery app without results backend on {}'.format(broker_url))
|
||||||
|
|
||||||
|
return celery_app
|
||||||
21
apps/cic-cache/cic_cache/cli/chain.py
Normal file
21
apps/cic-cache/cic_cache/cli/chain.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# external imports
|
||||||
|
from chainlib.eth.block import (
|
||||||
|
block_by_number,
|
||||||
|
Block,
|
||||||
|
)
|
||||||
|
from chainlib.eth.tx import (
|
||||||
|
receipt,
|
||||||
|
Tx,
|
||||||
|
)
|
||||||
|
from chainlib.interface import ChainInterface
|
||||||
|
|
||||||
|
|
||||||
|
class EthChainInterface(ChainInterface):
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self._tx_receipt = receipt
|
||||||
|
self._block_by_number = block_by_number
|
||||||
|
self._block_from_src = Block.from_src
|
||||||
|
self._src_normalize = Tx.src_normalize
|
||||||
|
|
||||||
|
chain_interface = EthChainInterface()
|
||||||
63
apps/cic-cache/cic_cache/cli/config.py
Normal file
63
apps/cic-cache/cic_cache/cli/config.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
from chainlib.eth.cli import (
|
||||||
|
Config as BaseConfig,
|
||||||
|
Flag,
|
||||||
|
)
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .base import CICFlag
|
||||||
|
|
||||||
|
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Config(BaseConfig):
    """cic-cache configuration, layering the package's bundled config
    directory and local CLI flag overrides on top of the chainlib base
    Config.
    """

    # configuration defaults bundled with this package
    local_base_config_dir = os.path.join(script_dir, '..', 'data', 'config')

    @classmethod
    def from_args(cls, args, arg_flags, local_arg_flags, extra_args=None, default_config_dir=None, base_config_dir=None, default_fee_limit=None):
        """Create a Config instance from parsed command line arguments.

        :param args: Parsed command line arguments
        :param arg_flags: Flag vector for the base (chainlib) argument set
        :param local_arg_flags: CICFlag bit vector selecting local overrides
        :param extra_args: Extra argument-to-config mappings, passed through
            to BaseConfig.from_args
        :param default_config_dir: User-supplied override config directory
        :param base_config_dir: Additional base config directory or list of
            directories, searched after the bundled defaults
        :param default_fee_limit: Unused here; kept for signature
            compatibility with sibling Config implementations
        :rtype: Config
        :returns: Populated configuration object
        """
        # fix: the original used a mutable dict ({}) as default argument
        # value, which is shared across calls; normalize from None instead
        if extra_args is None:
            extra_args = {}

        # always search the bundled config dir first, then any
        # caller-provided base dirs
        expanded_base_config_dir = [cls.local_base_config_dir]
        if base_config_dir is not None:
            if isinstance(base_config_dir, str):
                base_config_dir = [base_config_dir]
            expanded_base_config_dir.extend(base_config_dir)

        config = BaseConfig.from_args(args, arg_flags, extra_args=extra_args, default_config_dir=default_config_dir, base_config_dir=expanded_base_config_dir, load_callback=None)

        # translate selected CLI arguments into config overrides,
        # gated on the local flag vector
        local_args_override = {}
        if local_arg_flags & CICFlag.CHAIN:
            local_args_override['CIC_REGISTRY_ADDRESS'] = getattr(args, 'registry_address')

        if local_arg_flags & CICFlag.CELERY:
            local_args_override['CELERY_QUEUE'] = getattr(args, 'celery_queue')

        if local_arg_flags & CICFlag.SYNCER:
            local_args_override['SYNCER_OFFSET'] = getattr(args, 'offset')
            local_args_override['SYNCER_NO_HISTORY'] = getattr(args, 'no_history')

        config.dict_override(local_args_override, 'local cli args')

        if local_arg_flags & CICFlag.CELERY:
            # normalize the celery debug setting to a boolean
            config.add(config.true('CELERY_DEBUG'), 'CELERY_DEBUG', exists_ok=True)

        logg.debug('config loaded:\n{}'.format(config))

        return config
33
apps/cic-cache/cic_cache/cli/registry.py
Normal file
33
apps/cic-cache/cic_cache/cli/registry.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
from cic_eth_registry import CICRegistry
|
||||||
|
from cic_eth_registry.lookup.declarator import AddressDeclaratorLookup
|
||||||
|
from cic_eth_registry.lookup.tokenindex import TokenIndexLookup
|
||||||
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def connect_token_registry(self, conn, chain_spec, sender_address=ZERO_ADDRESS):
    """Register a TokenRegistry-backed token index lookup with CICRegistry.

    :param conn: RPC connection
    :param chain_spec: Chain spec of the target chain
    :param sender_address: Address to use as query sender
    """
    registry = CICRegistry(chain_spec, conn)
    index_address = registry.by_name('TokenRegistry', sender_address=sender_address)
    logg.debug('using token registry address {}'.format(index_address))
    CICRegistry.add_lookup(TokenIndexLookup(chain_spec, index_address))
|
|
||||||
|
|
||||||
|
def connect_declarator(self, conn, chain_spec, trusted_addresses, sender_address=ZERO_ADDRESS):
    """Register an AddressDeclarator-backed lookup with CICRegistry.

    :param conn: RPC connection
    :param chain_spec: Chain spec of the target chain
    :param trusted_addresses: Declarator addresses considered trusted
    :param sender_address: Address to use as query sender
    """
    registry = CICRegistry(chain_spec, conn)
    resolved_address = registry.by_name('AddressDeclarator', sender_address=sender_address)
    logg.debug('using declarator address {}'.format(resolved_address))
    CICRegistry.add_lookup(AddressDeclaratorLookup(chain_spec, resolved_address, trusted_addresses))
|
|
||||||
|
|
||||||
|
def connect_registry(conn, chain_spec, registry_address, sender_address=ZERO_ADDRESS):
    """Point CICRegistry at the given registry address and return an instance.

    :param conn: RPC connection
    :param chain_spec: Chain spec of the target chain
    :param registry_address: ContractRegistry contract address
    :param sender_address: Address to use as query sender
    :returns: Connected registry instance
    """
    CICRegistry.address = registry_address
    registry = CICRegistry(chain_spec, conn)
    # resolve ContractRegistry as a sanity check of the configured address;
    # the resolved value itself is not used further
    registry.by_name('ContractRegistry', sender_address=sender_address)
    return registry
43
apps/cic-cache/cic_cache/cli/rpc.py
Normal file
43
apps/cic-cache/cic_cache/cli/rpc.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
from chainlib.connection import (
|
||||||
|
RPCConnection,
|
||||||
|
ConnType,
|
||||||
|
)
|
||||||
|
from chainlib.eth.connection import EthUnixSignerConnection
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class RPC:
    """Factory holding the chain spec and provider endpoints needed to
    hand out RPC connections.

    :param chain_spec: Chain spec the connections are registered under
    :param rpc_provider: URL of the default RPC endpoint
    :param signer_provider: Optional URL of the signer endpoint
    """

    def __init__(self, chain_spec, rpc_provider, signer_provider=None):
        self.chain_spec = chain_spec
        self.rpc_provider = rpc_provider
        self.signer_provider = signer_provider


    def get_default(self):
        """Return the connection registered under the 'default' tag."""
        return RPCConnection.connect(self.chain_spec, 'default')


    @staticmethod
    def from_config(config):
        """Build an RPC factory from configuration, registering the default
        (and, when configured, signer) connection locations globally.

        :param config: Configuration object with CHAIN_SPEC, RPC_PROVIDER
            and optionally SIGNER_PROVIDER set
        :returns: Configured RPC factory
        """
        chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
        RPCConnection.register_location(config.get('RPC_PROVIDER'), chain_spec, 'default')

        signer_provider = config.get('SIGNER_PROVIDER')
        if signer_provider:
            # the signer is reached over a unix socket
            RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, tag='signer')
            RPCConnection.register_location(signer_provider, chain_spec, 'signer')

        rpc = RPC(chain_spec, config.get('RPC_PROVIDER'), signer_provider=signer_provider)
        logg.info('set up rpc: {}'.format(rpc))
        return rpc


    def __str__(self):
        return 'RPC factory, chain {}, rpc {}, signer {}'.format(self.chain_spec, self.rpc_provider, self.signer_provider)
5
apps/cic-cache/cic_cache/data/config/celery.ini
Normal file
5
apps/cic-cache/cic_cache/data/config/celery.ini
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
[celery]
|
||||||
|
broker_url = redis://localhost:6379
|
||||||
|
result_url =
|
||||||
|
queue = cic-cache
|
||||||
|
debug = 0
|
||||||
4
apps/cic-cache/cic_cache/data/config/cic.ini
Normal file
4
apps/cic-cache/cic_cache/data/config/cic.ini
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
[cic]
|
||||||
|
registry_address =
|
||||||
|
trust_address =
|
||||||
|
health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas
|
||||||
10
apps/cic-cache/cic_cache/data/config/database.ini
Normal file
10
apps/cic-cache/cic_cache/data/config/database.ini
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
[database]
|
||||||
|
engine =
|
||||||
|
driver =
|
||||||
|
host =
|
||||||
|
port =
|
||||||
|
name = cic-cache
|
||||||
|
user =
|
||||||
|
password =
|
||||||
|
debug = 0
|
||||||
|
pool_size = 0
|
||||||
2
apps/cic-cache/cic_cache/data/config/signer.ini
Normal file
2
apps/cic-cache/cic_cache/data/config/signer.ini
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
[signer]
|
||||||
|
provider =
|
||||||
4
apps/cic-cache/cic_cache/data/config/syncer.ini
Normal file
4
apps/cic-cache/cic_cache/data/config/syncer.ini
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
[syncer]
|
||||||
|
loop_interval = 1
|
||||||
|
offset = 0
|
||||||
|
no_history = 0
|
||||||
@@ -2,9 +2,14 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from .list import list_transactions_mined
|
from .list import (
|
||||||
from .list import list_transactions_account_mined
|
list_transactions_mined,
|
||||||
from .list import add_transaction
|
list_transactions_account_mined,
|
||||||
|
add_transaction,
|
||||||
|
tag_transaction,
|
||||||
|
add_tag,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|||||||
@@ -2,8 +2,9 @@
|
|||||||
import logging
|
import logging
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
from cic_cache.db.models.base import SessionBase
|
from cic_cache.db.models.base import SessionBase
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
@@ -12,6 +13,9 @@ def list_transactions_mined(
|
|||||||
session,
|
session,
|
||||||
offset,
|
offset,
|
||||||
limit,
|
limit,
|
||||||
|
block_offset,
|
||||||
|
block_limit,
|
||||||
|
oldest=False,
|
||||||
):
|
):
|
||||||
"""Executes db query to return all confirmed transactions according to the specified offset and limit.
|
"""Executes db query to return all confirmed transactions according to the specified offset and limit.
|
||||||
|
|
||||||
@@ -22,7 +26,154 @@ def list_transactions_mined(
|
|||||||
:result: Result set
|
:result: Result set
|
||||||
:rtype: SQLAlchemy.ResultProxy
|
:rtype: SQLAlchemy.ResultProxy
|
||||||
"""
|
"""
|
||||||
s = "SELECT block_number, tx_index FROM tx ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(limit, offset)
|
order_by = 'DESC'
|
||||||
|
if oldest:
|
||||||
|
order_by = 'ASC'
|
||||||
|
|
||||||
|
if block_offset:
|
||||||
|
if block_limit:
|
||||||
|
s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} and block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, limit, offset)
|
||||||
|
else:
|
||||||
|
s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, limit, offset)
|
||||||
|
else:
|
||||||
|
s = "SELECT block_number, tx_index FROM tx ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, limit, offset)
|
||||||
|
r = session.execute(s)
|
||||||
|
return r
|
||||||
|
|
||||||
|
|
||||||
|
def list_transactions_mined_with_data(
        session,
        offset,
        limit,
        block_offset,
        block_limit,
        oldest=False,
        ):
    """Executes db query to return all confirmed transactions according to the specified offset and limit.

    :param block_offset: First block to include in search
    :type block_offset: int
    :param block_limit: Last block to include in search
    :type block_limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """
    order_by = 'ASC' if oldest else 'DESC'

    # NOTE(review): query values are interpolated directly into the SQL
    # string; callers must ensure they are trusted -- confirm upstream
    if not block_offset:
        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, limit, offset)
    elif block_limit:
        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, limit, offset)
    else:
        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, limit, offset)

    return session.execute(s)
|
|
||||||
|
|
||||||
|
def list_transactions_mined_with_data_index(
        session,
        offset,
        end,
        block_offset,
        block_limit,
        oldest=False,
        ):
    """Executes db query to return all confirmed transactions according to the specified offset and limit.

    :param offset: Offset in data set to return transactions from
    :type offset: int
    :param limit: Max number of transactions to retrieve
    :type limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """

    # newest first unless oldest is requested
    order_by = 'DESC'
    if oldest:
        order_by = 'ASC'

    # NOTE(review): unlike the sibling queries, here `offset` fills the SQL
    # LIMIT slot and `end` fills the OFFSET slot -- confirm this argument
    # mapping is intentional and not a swapped-parameter bug.
    if block_offset:
        if block_limit:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} and block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, offset, end)
        else:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, offset, end)
    else:
        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, offset, end)


    r = session.execute(s)
    return r
|
|
||||||
|
|
||||||
|
def list_transactions_account_mined_with_data_index(
        session,
        address,
        offset,
        limit,
        block_offset,
        block_limit,
        oldest=False,
        ):
    """Executes db query to return all confirmed transactions according to the specified offset and limit, filtered by address

    :param offset: Offset in data set to return transactions from
    :type offset: int
    :param limit: Max number of transactions to retrieve
    :type limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """

    # newest first unless oldest is requested
    order_by = 'DESC'
    if oldest:
        order_by = 'ASC'

    # NOTE(review): `address` is interpolated directly into the SQL string;
    # if it can originate from untrusted input this is an injection risk --
    # consider parameterized queries (sqlalchemy text() with bind params).
    if block_offset:
        if block_limit:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
        else:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
    else:
        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)

    r = session.execute(s)
    return r
|
|
||||||
|
def list_transactions_account_mined_with_data(
        session,
        address,
        offset,
        limit,
        block_offset,
        block_limit,
        oldest=False,
        ):
    """Executes db query to return all confirmed transactions according to the specified offset and limit.

    :param block_offset: First block to include in search
    :type block_offset: int
    :param block_limit: Last block to include in search
    :type block_limit: int
    :result: Result set
    :rtype: SQLAlchemy.ResultProxy
    """

    # newest first unless oldest is requested
    order_by = 'DESC'
    if oldest:
        order_by = 'ASC'

    # NOTE(review): `address` is interpolated directly into the SQL string;
    # if it can originate from untrusted input this is an injection risk --
    # consider parameterized queries (sqlalchemy text() with bind params).
    if block_offset:
        if block_limit:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
        else:
            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
    else:
        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)

    r = session.execute(s)
    return r
|
|
||||||
@@ -32,6 +183,9 @@ def list_transactions_account_mined(
|
|||||||
address,
|
address,
|
||||||
offset,
|
offset,
|
||||||
limit,
|
limit,
|
||||||
|
block_offset,
|
||||||
|
block_limit,
|
||||||
|
oldest=False,
|
||||||
):
|
):
|
||||||
"""Same as list_transactions_mined(...), but only retrieves transaction where the specified account address is sender or recipient.
|
"""Same as list_transactions_mined(...), but only retrieves transaction where the specified account address is sender or recipient.
|
||||||
|
|
||||||
@@ -44,13 +198,27 @@ def list_transactions_account_mined(
|
|||||||
:result: Result set
|
:result: Result set
|
||||||
:rtype: SQLAlchemy.ResultProxy
|
:rtype: SQLAlchemy.ResultProxy
|
||||||
"""
|
"""
|
||||||
s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(address, address, limit, offset)
|
|
||||||
|
order_by = 'DESC'
|
||||||
|
if oldest:
|
||||||
|
order_by = 'ASC'
|
||||||
|
|
||||||
|
if block_offset:
|
||||||
|
if block_limit:
|
||||||
|
s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
|
||||||
|
else:
|
||||||
|
s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
|
||||||
|
|
||||||
|
else:
|
||||||
|
s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)
|
||||||
|
|
||||||
r = session.execute(s)
|
r = session.execute(s)
|
||||||
return r
|
return r
|
||||||
|
|
||||||
|
|
||||||
def add_transaction(
|
def add_transaction(
|
||||||
session, tx_hash,
|
session,
|
||||||
|
tx_hash,
|
||||||
block_number,
|
block_number,
|
||||||
tx_index,
|
tx_index,
|
||||||
sender,
|
sender,
|
||||||
@@ -62,6 +230,33 @@ def add_transaction(
|
|||||||
success,
|
success,
|
||||||
timestamp,
|
timestamp,
|
||||||
):
|
):
|
||||||
|
"""Adds a single transaction to the cache persistent storage. Sensible interpretation of all fields is the responsibility of the caller.
|
||||||
|
|
||||||
|
:param session: Persistent storage session object
|
||||||
|
:type session: SQLAlchemy session
|
||||||
|
:param tx_hash: Transaction hash
|
||||||
|
:type tx_hash: str, 0x-hex
|
||||||
|
:param block_number: Block number
|
||||||
|
:type block_number: int
|
||||||
|
:param tx_index: Transaction index in block
|
||||||
|
:type tx_index: int
|
||||||
|
:param sender: Ethereum address of effective sender
|
||||||
|
:type sender: str, 0x-hex
|
||||||
|
:param receiver: Ethereum address of effective recipient
|
||||||
|
:type receiver: str, 0x-hex
|
||||||
|
:param source_token: Ethereum address of token used by sender
|
||||||
|
:type source_token: str, 0x-hex
|
||||||
|
:param destination_token: Ethereum address of token received by recipient
|
||||||
|
:type destination_token: str, 0x-hex
|
||||||
|
:param from_value: Source token value spent in transaction
|
||||||
|
:type from_value: int
|
||||||
|
:param to_value: Destination token value received in transaction
|
||||||
|
:type to_value: int
|
||||||
|
:param success: True if code execution on network was successful
|
||||||
|
:type success: bool
|
||||||
|
:param date_block: Block timestamp
|
||||||
|
:type date_block: datetime
|
||||||
|
"""
|
||||||
date_block = datetime.datetime.fromtimestamp(timestamp)
|
date_block = datetime.datetime.fromtimestamp(timestamp)
|
||||||
s = "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, source_token, destination_token, from_value, to_value, success, date_block) VALUES ('{}', {}, {}, '{}', '{}', '{}', '{}', {}, {}, {}, '{}')".format(
|
s = "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, source_token, destination_token, from_value, to_value, success, date_block) VALUES ('{}', {}, {}, '{}', '{}', '{}', '{}', {}, {}, {}, '{}')".format(
|
||||||
tx_hash,
|
tx_hash,
|
||||||
@@ -77,3 +272,74 @@ def add_transaction(
|
|||||||
date_block,
|
date_block,
|
||||||
)
|
)
|
||||||
session.execute(s)
|
session.execute(s)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def tag_transaction(
        session,
        tx_hash,
        name,
        domain=None,
        ):
    """Tag a single transaction with a single tag.

    Tag must already exist in storage.

    :param session: Persistent storage session object
    :type session: SQLAlchemy session
    :param tx_hash: Transaction hash
    :type tx_hash: str, 0x-hex
    :param name: Tag value
    :type name: str
    :param domain: Tag domain
    :type domain: str
    :raises ValueError: Unknown tag or transaction hash
    """

    s = text("SELECT id from tx where tx_hash = :a")
    r = session.execute(s, {'a': tx_hash}).fetchall()
    # fix: an unknown hash yields an empty result set; previously r[0]
    # raised IndexError instead of the documented ValueError
    if len(r) == 0:
        raise ValueError('unknown tx hash {}'.format(tx_hash))
    tx_id = r[0].values()[0]
    if tx_id == None:
        raise ValueError('unknown tx hash {}'.format(tx_hash))

    # domain is optional in the tag lookup
    if domain == None:
        s = text("SELECT id from tag where value = :a")
    else:
        s = text("SELECT id from tag where value = :a and domain = :b")
    r = session.execute(s, {'a': name, 'b': domain}).fetchall()
    # fix: same empty-result guard for unknown tags
    if len(r) == 0:
        raise ValueError('unknown tag name {} domain {}'.format(name, domain))
    tag_id = r[0].values()[0]

    logg.debug('type {} {}'.format(type(tag_id), type(tx_id)))

    if tag_id == None:
        raise ValueError('unknown tag name {} domain {}'.format(name, domain))

    s = text("INSERT INTO tag_tx_link (tag_id, tx_id) VALUES (:a, :b)")
    r = session.execute(s, {'a': int(tag_id), 'b': int(tx_id)})
|
|
||||||
|
|
||||||
|
def add_tag(
        session,
        name,
        domain=None,
        ):
    """Add a single tag to storage.

    :param session: Persistent storage session object
    :type session: SQLAlchemy session
    :param name: Tag value
    :type name: str
    :param domain: Tag domain
    :type domain: str
    :raises sqlalchemy.exc.IntegrityError: Tag already exists
    """

    # domain column is omitted when no domain is given
    query = (
            text("INSERT INTO tag (value) VALUES (:b)")
            if domain is None
            else text("INSERT INTO tag (domain, value) VALUES (:a, :b)")
            )
    session.execute(query, {'a': domain, 'b': name})
|
|||||||
@@ -1,28 +1,28 @@
|
|||||||
"""Add chain syncer
|
"""Add chain syncer
|
||||||
|
|
||||||
Revision ID: ec40ac0974c1
|
Revision ID: 6604de4203e2
|
||||||
Revises: 6ac7a1dadc46
|
Revises: 63b629f14a85
|
||||||
Create Date: 2021-02-23 06:10:19.246304
|
Create Date: 2021-04-01 08:10:29.156243
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from alembic import op
|
from alembic import op
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
from chainsyncer.db.migrations.sqlalchemy import (
|
from chainsyncer.db.migrations.default.export import (
|
||||||
chainsyncer_upgrade,
|
chainsyncer_upgrade,
|
||||||
chainsyncer_downgrade,
|
chainsyncer_downgrade,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
revision = 'ec40ac0974c1'
|
revision = '6604de4203e2'
|
||||||
down_revision = '6ac7a1dadc46'
|
down_revision = '63b629f14a85'
|
||||||
branch_labels = None
|
branch_labels = None
|
||||||
depends_on = None
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
def upgrade():
|
||||||
chainsyncer_upgrade(0, 0, 1)
|
chainsyncer_upgrade(0, 0, 1)
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
def downgrade():
|
||||||
chainsyncer_downgrade(0, 0, 1)
|
chainsyncer_downgrade(0, 0, 1)
|
||||||
|
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
"""Transaction tags
|
||||||
|
|
||||||
|
Revision ID: aaf2bdce7d6e
|
||||||
|
Revises: 6604de4203e2
|
||||||
|
Create Date: 2021-05-01 09:20:20.775082
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'aaf2bdce7d6e'
|
||||||
|
down_revision = '6604de4203e2'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    # tags are (domain, value) pairs; domain is optional, and the pair is
    # unique across the table
    op.create_table(
        'tag',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('domain', sa.String(), nullable=True),
        sa.Column('value', sa.String(), nullable=False),
    )
    op.create_index('idx_tag_domain_value', 'tag', ['domain', 'value'], unique=True)

    # many-to-many link between tags and cached transactions
    op.create_table(
        'tag_tx_link',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('tag_id', sa.Integer, sa.ForeignKey('tag.id'), nullable=False),
        sa.Column('tx_id', sa.Integer, sa.ForeignKey('tx.id'), nullable=False),
    )


def downgrade():
    # drop in reverse order of creation: link table first (it references
    # tag), then the index, then the tag table itself
    op.drop_table('tag_tx_link')
    op.drop_index('idx_tag_domain_value')
    op.drop_table('tag')
@@ -100,3 +100,4 @@ class SessionBase(Model):
|
|||||||
logg.debug('destroying session {}'.format(session_key))
|
logg.debug('destroying session {}'.format(session_key))
|
||||||
session.commit()
|
session.commit()
|
||||||
session.close()
|
session.close()
|
||||||
|
del SessionBase.localsessions[session_key]
|
||||||
|
|||||||
@@ -0,0 +1,2 @@
|
|||||||
|
from .erc20 import *
|
||||||
|
from .faucet import *
|
||||||
27
apps/cic-cache/cic_cache/runnable/daemons/filters/base.py
Normal file
27
apps/cic-cache/cic_cache/runnable/daemons/filters/base.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
class TagSyncFilter:
    """Holds tag name and domain for an implementing filter.

    :param name: Tag value
    :type name: str
    :param domain: Tag domain
    :type domain: str
    """

    def __init__(self, name, domain=None):
        self.tag_name = name
        self.tag_domain = domain


    def tag(self):
        """Return tag value/domain.

        :rtype: Tuple
        :returns: tag value/domain.
        """
        return (self.tag_name, self.tag_domain)


    def __str__(self):
        # render as "domain.name" when a domain is set, bare name otherwise
        return self.tag_name if self.tag_domain is None else '{}.{}'.format(self.tag_domain, self.tag_name)
83
apps/cic-cache/cic_cache/runnable/daemons/filters/erc20.py
Normal file
83
apps/cic-cache/cic_cache/runnable/daemons/filters/erc20.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
from chainlib.eth.address import (
|
||||||
|
to_checksum_address,
|
||||||
|
)
|
||||||
|
from chainlib.eth.error import RequestMismatchException
|
||||||
|
from chainlib.status import Status
|
||||||
|
from cic_eth_registry.erc20 import ERC20Token
|
||||||
|
from cic_eth_registry.error import (
|
||||||
|
NotAContractError,
|
||||||
|
ContractMismatchError,
|
||||||
|
)
|
||||||
|
from eth_erc20 import ERC20
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .base import TagSyncFilter
|
||||||
|
from cic_cache import db as cic_cache_db
|
||||||
|
|
||||||
|
logg = logging.getLogger().getChild(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ERC20TransferFilter(TagSyncFilter):
    """Syncer filter that detects ERC20 transfer transactions and records
    them in the cache db, tagged as erc20.transfer.

    :param chain_spec: Chain spec of the chain being synced
    """

    def __init__(self, chain_spec):
        super(ERC20TransferFilter, self).__init__('transfer', domain='erc20')
        self.chain_spec = chain_spec


    # TODO: Verify token in declarator / token index
    def filter(self, conn, block, tx, db_session=None):
        """Inspect a transaction; if it is an ERC20 transfer, store and tag
        it. Returns True only when the transaction was recorded.

        :param conn: RPC connection
        :param block: Block object containing the transaction
        :param tx: Transaction object under inspection
        :param db_session: Cache db session the record is written to
        :rtype: bool
        """
        logg.debug('filter {} {}'.format(block, tx))
        token = None
        # probe the tx target for being an ERC20 token contract; a miss is
        # the common case and simply skips the tx
        try:
            token = ERC20Token(self.chain_spec, conn, tx.inputs[0])
        except NotAContractError:
            logg.debug('not a contract {}'.format(tx.inputs[0]))
            return False
        except ContractMismatchError:
            logg.debug('not an erc20 token {}'.format(tx.inputs[0]))
            return False

        transfer_data = None
        try:
            transfer_data = ERC20.parse_transfer_request(tx.payload)
        except RequestMismatchException:
            logg.debug('erc20 match but not a transfer, skipping')
            return False
        except ValueError:
            logg.debug('erc20 match but bogus data, skipping')
            return False

        token_sender = tx.outputs[0]
        # transfer_data is (recipient, value) as parsed from the call data
        token_recipient = transfer_data[0]
        token_value = transfer_data[1]

        logg.debug('matched erc20 token transfer {} ({}) to {} value {}'.format(token.name, token.address, transfer_data[0], transfer_data[1]))

        # same token and value on both sides: a plain transfer does no
        # conversion
        cic_cache_db.add_transaction(
            db_session,
            tx.hash,
            block.number,
            tx.index,
            to_checksum_address(token_sender),
            to_checksum_address(token_recipient),
            token.address,
            token.address,
            token_value,
            token_value,
            tx.status == Status.SUCCESS,
            block.timestamp,
        )
        # flush so the tx row is visible to the tag lookup before commit
        db_session.flush()
        cic_cache_db.tag_transaction(
            db_session,
            tx.hash,
            self.tag_name,
            domain=self.tag_domain,
        )
        db_session.commit()

        return True
73
apps/cic-cache/cic_cache/runnable/daemons/filters/faucet.py
Normal file
73
apps/cic-cache/cic_cache/runnable/daemons/filters/faucet.py
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
from erc20_faucet import Faucet
|
||||||
|
from chainlib.eth.address import to_checksum_address
|
||||||
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
|
from chainlib.status import Status
|
||||||
|
from hexathon import strip_0x
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
import cic_cache.db as cic_cache_db
|
||||||
|
from .base import TagSyncFilter
|
||||||
|
|
||||||
|
#logg = logging.getLogger().getChild(__name__)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class FaucetFilter(TagSyncFilter):
|
||||||
|
|
||||||
|
def __init__(self, chain_spec, sender_address=ZERO_ADDRESS):
|
||||||
|
super(FaucetFilter, self).__init__('give_to', domain='faucet')
|
||||||
|
self.chain_spec = chain_spec
|
||||||
|
self.sender_address = sender_address
|
||||||
|
|
||||||
|
|
||||||
|
def filter(self, conn, block, tx, db_session=None):
|
||||||
|
try:
|
||||||
|
data = strip_0x(tx.payload)
|
||||||
|
except ValueError:
|
||||||
|
return False
|
||||||
|
logg.debug('data {}'.format(data))
|
||||||
|
if Faucet.method_for(data[:8]) == None:
|
||||||
|
return False
|
||||||
|
|
||||||
|
token_sender = tx.inputs[0]
|
||||||
|
token_recipient = data[64+8-40:]
|
||||||
|
logg.debug('token recipient {}'.format(token_recipient))
|
||||||
|
|
||||||
|
f = Faucet(self.chain_spec)
|
||||||
|
o = f.token(token_sender, sender_address=self.sender_address)
|
||||||
|
r = conn.do(o)
|
||||||
|
token = f.parse_token(r)
|
||||||
|
|
||||||
|
f = Faucet(self.chain_spec)
|
||||||
|
o = f.token_amount(token_sender, sender_address=self.sender_address)
|
||||||
|
r = conn.do(o)
|
||||||
|
token_value = f.parse_token_amount(r)
|
||||||
|
|
||||||
|
cic_cache_db.add_transaction(
|
||||||
|
db_session,
|
||||||
|
tx.hash,
|
||||||
|
block.number,
|
||||||
|
tx.index,
|
||||||
|
to_checksum_address(token_sender),
|
||||||
|
to_checksum_address(token_recipient),
|
||||||
|
token,
|
||||||
|
token,
|
||||||
|
token_value,
|
||||||
|
token_value,
|
||||||
|
tx.status == Status.SUCCESS,
|
||||||
|
block.timestamp,
|
||||||
|
)
|
||||||
|
db_session.flush()
|
||||||
|
cic_cache_db.tag_transaction(
|
||||||
|
db_session,
|
||||||
|
tx.hash,
|
||||||
|
self.tag_name,
|
||||||
|
domain=self.tag_domain,
|
||||||
|
)
|
||||||
|
db_session.commit()
|
||||||
|
|
||||||
|
return True
|
||||||
115
apps/cic-cache/cic_cache/runnable/daemons/query.py
Normal file
115
apps/cic-cache/cic_cache/runnable/daemons/query.py
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import base64
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
from hexathon import add_0x
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_cache.cache import (
|
||||||
|
BloomCache,
|
||||||
|
DataCache,
|
||||||
|
)
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
#logg = logging.getLogger()
|
||||||
|
|
||||||
|
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
|
||||||
|
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
|
||||||
|
re_transactions_all_data = r'/txa/(\d+)?/?(\d+)/?'
|
||||||
|
|
||||||
|
DEFAULT_LIMIT = 100
|
||||||
|
|
||||||
|
|
||||||
|
def process_transactions_account_bloom(session, env):
|
||||||
|
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
|
||||||
|
if not r:
|
||||||
|
return None
|
||||||
|
|
||||||
|
address = r[1]
|
||||||
|
if r[2] == None:
|
||||||
|
address = add_0x(address)
|
||||||
|
offset = 0
|
||||||
|
if r.lastindex > 2:
|
||||||
|
offset = r[4]
|
||||||
|
limit = DEFAULT_LIMIT
|
||||||
|
if r.lastindex > 4:
|
||||||
|
limit = r[6]
|
||||||
|
|
||||||
|
c = BloomCache(session)
|
||||||
|
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
|
||||||
|
|
||||||
|
o = {
|
||||||
|
'alg': 'sha256',
|
||||||
|
'low': lowest_block,
|
||||||
|
'high': highest_block,
|
||||||
|
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
|
||||||
|
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
|
||||||
|
'filter_rounds': 3,
|
||||||
|
}
|
||||||
|
|
||||||
|
j = json.dumps(o)
|
||||||
|
|
||||||
|
return ('application/json', j.encode('utf-8'),)
|
||||||
|
|
||||||
|
|
||||||
|
def process_transactions_all_bloom(session, env):
|
||||||
|
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
|
||||||
|
if not r:
|
||||||
|
return None
|
||||||
|
|
||||||
|
offset = DEFAULT_LIMIT
|
||||||
|
if r.lastindex > 0:
|
||||||
|
offset = r[1]
|
||||||
|
limit = 0
|
||||||
|
if r.lastindex > 1:
|
||||||
|
limit = r[2]
|
||||||
|
|
||||||
|
c = BloomCache(session)
|
||||||
|
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
|
||||||
|
|
||||||
|
o = {
|
||||||
|
'alg': 'sha256',
|
||||||
|
'low': lowest_block,
|
||||||
|
'high': highest_block,
|
||||||
|
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
|
||||||
|
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
|
||||||
|
'filter_rounds': 3,
|
||||||
|
}
|
||||||
|
|
||||||
|
j = json.dumps(o)
|
||||||
|
|
||||||
|
return ('application/json', j.encode('utf-8'),)
|
||||||
|
|
||||||
|
|
||||||
|
def process_transactions_all_data(session, env):
|
||||||
|
r = re.match(re_transactions_all_data, env.get('PATH_INFO'))
|
||||||
|
if not r:
|
||||||
|
return None
|
||||||
|
if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
|
||||||
|
return None
|
||||||
|
|
||||||
|
logg.debug('got data request {}'.format(env))
|
||||||
|
block_offset = r[1]
|
||||||
|
block_end = r[2]
|
||||||
|
if int(r[2]) < int(r[1]):
|
||||||
|
raise ValueError('cart before the horse, dude')
|
||||||
|
|
||||||
|
c = DataCache(session)
|
||||||
|
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(0, 0, block_offset, block_end, oldest=True) # oldest needs to be settable
|
||||||
|
|
||||||
|
for r in tx_cache:
|
||||||
|
r['date_block'] = r['date_block'].timestamp()
|
||||||
|
|
||||||
|
o = {
|
||||||
|
'low': lowest_block,
|
||||||
|
'high': highest_block,
|
||||||
|
'data': tx_cache,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
j = json.dumps(o)
|
||||||
|
|
||||||
|
return ('application/json', j.encode('utf-8'),)
|
||||||
@@ -1,18 +1,20 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
import os
|
import os
|
||||||
import re
|
|
||||||
import logging
|
import logging
|
||||||
import argparse
|
import argparse
|
||||||
import json
|
|
||||||
import base64
|
import base64
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import confini
|
import confini
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_cache import BloomCache
|
|
||||||
from cic_cache.db import dsn_from_config
|
from cic_cache.db import dsn_from_config
|
||||||
from cic_cache.db.models.base import SessionBase
|
from cic_cache.db.models.base import SessionBase
|
||||||
|
from cic_cache.runnable.daemons.query import (
|
||||||
|
process_transactions_account_bloom,
|
||||||
|
process_transactions_all_bloom,
|
||||||
|
process_transactions_all_data,
|
||||||
|
)
|
||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
@@ -44,72 +46,6 @@ logg.debug('config:\n{}'.format(config))
|
|||||||
dsn = dsn_from_config(config)
|
dsn = dsn_from_config(config)
|
||||||
SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
|
SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
|
||||||
|
|
||||||
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
|
|
||||||
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'
|
|
||||||
|
|
||||||
DEFAULT_LIMIT = 100
|
|
||||||
|
|
||||||
|
|
||||||
def process_transactions_account_bloom(session, env):
|
|
||||||
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
|
|
||||||
if not r:
|
|
||||||
return None
|
|
||||||
|
|
||||||
address = r[1]
|
|
||||||
if r[2] == None:
|
|
||||||
address = '0x' + address
|
|
||||||
offset = DEFAULT_LIMIT
|
|
||||||
if r.lastindex > 2:
|
|
||||||
offset = r[3]
|
|
||||||
limit = 0
|
|
||||||
if r.lastindex > 3:
|
|
||||||
limit = r[4]
|
|
||||||
|
|
||||||
c = BloomCache(session)
|
|
||||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
|
|
||||||
|
|
||||||
o = {
|
|
||||||
'alg': 'sha256',
|
|
||||||
'low': lowest_block,
|
|
||||||
'high': highest_block,
|
|
||||||
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
|
|
||||||
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
|
|
||||||
'filter_rounds': 3,
|
|
||||||
}
|
|
||||||
|
|
||||||
j = json.dumps(o)
|
|
||||||
|
|
||||||
return ('application/json', j.encode('utf-8'),)
|
|
||||||
|
|
||||||
|
|
||||||
def process_transactions_all_bloom(session, env):
|
|
||||||
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
|
|
||||||
if not r:
|
|
||||||
return None
|
|
||||||
|
|
||||||
offset = DEFAULT_LIMIT
|
|
||||||
if r.lastindex > 0:
|
|
||||||
offset = r[1]
|
|
||||||
limit = 0
|
|
||||||
if r.lastindex > 1:
|
|
||||||
limit = r[2]
|
|
||||||
|
|
||||||
c = BloomCache(session)
|
|
||||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
|
|
||||||
|
|
||||||
o = {
|
|
||||||
'alg': 'sha256',
|
|
||||||
'low': lowest_block,
|
|
||||||
'high': highest_block,
|
|
||||||
'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
|
|
||||||
'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
|
|
||||||
'filter_rounds': 3,
|
|
||||||
}
|
|
||||||
|
|
||||||
j = json.dumps(o)
|
|
||||||
|
|
||||||
return ('application/json', j.encode('utf-8'),)
|
|
||||||
|
|
||||||
|
|
||||||
# uwsgi application
|
# uwsgi application
|
||||||
def application(env, start_response):
|
def application(env, start_response):
|
||||||
@@ -119,10 +55,16 @@ def application(env, start_response):
|
|||||||
|
|
||||||
session = SessionBase.create_session()
|
session = SessionBase.create_session()
|
||||||
for handler in [
|
for handler in [
|
||||||
|
process_transactions_all_data,
|
||||||
process_transactions_all_bloom,
|
process_transactions_all_bloom,
|
||||||
process_transactions_account_bloom,
|
process_transactions_account_bloom,
|
||||||
]:
|
]:
|
||||||
r = handler(session, env)
|
r = None
|
||||||
|
try:
|
||||||
|
r = handler(session, env)
|
||||||
|
except ValueError as e:
|
||||||
|
start_response('400 {}'.format(str(e)))
|
||||||
|
return []
|
||||||
if r != None:
|
if r != None:
|
||||||
(mime_type, content) = r
|
(mime_type, content) = r
|
||||||
break
|
break
|
||||||
147
apps/cic-cache/cic_cache/runnable/daemons/tracker.py
Normal file
147
apps/cic-cache/cic_cache/runnable/daemons/tracker.py
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import sqlalchemy
|
||||||
|
from cic_eth_registry import CICRegistry
|
||||||
|
from cic_eth_registry.error import UnknownContractError
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
|
from chainlib.connection import RPCConnection
|
||||||
|
from chainlib.eth.block import (
|
||||||
|
block_latest,
|
||||||
|
)
|
||||||
|
from hexathon import (
|
||||||
|
strip_0x,
|
||||||
|
)
|
||||||
|
from chainsyncer.backend.sql import SQLBackend
|
||||||
|
from chainsyncer.driver.head import HeadSyncer
|
||||||
|
from chainsyncer.driver.history import HistorySyncer
|
||||||
|
from chainsyncer.db.models.base import SessionBase
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
import cic_cache.cli
|
||||||
|
from cic_cache.db import (
|
||||||
|
dsn_from_config,
|
||||||
|
add_tag,
|
||||||
|
)
|
||||||
|
from cic_cache.runnable.daemons.filters import (
|
||||||
|
ERC20TransferFilter,
|
||||||
|
FaucetFilter,
|
||||||
|
)
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
# process args
|
||||||
|
arg_flags = cic_cache.cli.argflag_std_read
|
||||||
|
local_arg_flags = cic_cache.cli.argflag_local_sync
|
||||||
|
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||||
|
argparser.process_local_flags(local_arg_flags)
|
||||||
|
args = argparser.parse_args()
|
||||||
|
|
||||||
|
# process config
|
||||||
|
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
|
||||||
|
|
||||||
|
# connect to database
|
||||||
|
dsn = dsn_from_config(config)
|
||||||
|
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
|
||||||
|
|
||||||
|
# set up rpc
|
||||||
|
rpc = cic_cache.cli.RPC.from_config(config)
|
||||||
|
conn = rpc.get_default()
|
||||||
|
|
||||||
|
# set up chain provisions
|
||||||
|
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
|
||||||
|
registry = None
|
||||||
|
try:
|
||||||
|
registry = cic_cache.cli.connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
|
||||||
|
except UnknownContractError as e:
|
||||||
|
logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
|
||||||
|
sys.exit(1)
|
||||||
|
logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))
|
||||||
|
|
||||||
|
|
||||||
|
def register_filter_tags(filters, session):
|
||||||
|
for f in filters:
|
||||||
|
tag = f.tag()
|
||||||
|
try:
|
||||||
|
add_tag(session, tag[0], domain=tag[1])
|
||||||
|
session.commit()
|
||||||
|
logg.info('added tag name "{}" domain "{}"'.format(tag[0], tag[1]))
|
||||||
|
except sqlalchemy.exc.IntegrityError:
|
||||||
|
session.rollback()
|
||||||
|
logg.debug('already have tag name "{}" domain "{}"'.format(tag[0], tag[1]))
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
# Connect to blockchain with chainlib
|
||||||
|
rpc = RPCConnection.connect(chain_spec, 'default')
|
||||||
|
|
||||||
|
o = block_latest()
|
||||||
|
r = rpc.do(o)
|
||||||
|
block_offset = int(strip_0x(r), 16) + 1
|
||||||
|
|
||||||
|
logg.debug('current block height {}'.format(block_offset))
|
||||||
|
|
||||||
|
syncers = []
|
||||||
|
|
||||||
|
syncer_backends = SQLBackend.resume(chain_spec, block_offset)
|
||||||
|
|
||||||
|
if len(syncer_backends) == 0:
|
||||||
|
initial_block_start = config.get('SYNCER_OFFSET')
|
||||||
|
initial_block_offset = block_offset
|
||||||
|
if config.get('SYNCER_NO_HISTORY'):
|
||||||
|
initial_block_start = block_offset
|
||||||
|
initial_block_offset += 1
|
||||||
|
syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
|
||||||
|
logg.info('found no backends to resume, adding initial sync from history start {} end {}'.format(initial_block_start, initial_block_offset))
|
||||||
|
else:
|
||||||
|
for syncer_backend in syncer_backends:
|
||||||
|
logg.info('resuming sync session {}'.format(syncer_backend))
|
||||||
|
|
||||||
|
for syncer_backend in syncer_backends:
|
||||||
|
syncers.append(HistorySyncer(syncer_backend, cic_cache.cli.chain_interface))
|
||||||
|
|
||||||
|
syncer_backend = SQLBackend.live(chain_spec, block_offset+1)
|
||||||
|
syncers.append(HeadSyncer(syncer_backend, cic_cache.cli.chain_interface))
|
||||||
|
|
||||||
|
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
||||||
|
if trusted_addresses_src == None:
|
||||||
|
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
|
||||||
|
sys.exit(1)
|
||||||
|
trusted_addresses = trusted_addresses_src.split(',')
|
||||||
|
for address in trusted_addresses:
|
||||||
|
logg.info('using trusted address {}'.format(address))
|
||||||
|
|
||||||
|
erc20_transfer_filter = ERC20TransferFilter(chain_spec)
|
||||||
|
faucet_filter = FaucetFilter(chain_spec)
|
||||||
|
|
||||||
|
filters = [
|
||||||
|
erc20_transfer_filter,
|
||||||
|
faucet_filter,
|
||||||
|
]
|
||||||
|
|
||||||
|
session = SessionBase.create_session()
|
||||||
|
register_filter_tags(filters, session)
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
i = 0
|
||||||
|
for syncer in syncers:
|
||||||
|
logg.debug('running syncer index {}'.format(i))
|
||||||
|
for f in filters:
|
||||||
|
syncer.add_filter(f)
|
||||||
|
r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc)
|
||||||
|
sys.stderr.write("sync {} done at block {}\n".format(syncer, r))
|
||||||
|
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
@@ -1,339 +0,0 @@
|
|||||||
# standard imports
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import argparse
|
|
||||||
import logging
|
|
||||||
import time
|
|
||||||
import enum
|
|
||||||
import re
|
|
||||||
|
|
||||||
# third-party imports
|
|
||||||
import confini
|
|
||||||
from cic_registry import CICRegistry
|
|
||||||
from cic_registry.chain import (
|
|
||||||
ChainRegistry,
|
|
||||||
ChainSpec,
|
|
||||||
)
|
|
||||||
#from cic_registry.bancor import BancorRegistryClient
|
|
||||||
from cic_registry.token import Token
|
|
||||||
from cic_registry.error import (
|
|
||||||
UnknownContractError,
|
|
||||||
UnknownDeclarationError,
|
|
||||||
)
|
|
||||||
from cic_registry.declaration import to_token_declaration
|
|
||||||
from web3.exceptions import BlockNotFound, TransactionNotFound
|
|
||||||
from websockets.exceptions import ConnectionClosedError
|
|
||||||
from requests.exceptions import ConnectionError
|
|
||||||
import web3
|
|
||||||
from web3 import HTTPProvider, WebsocketProvider
|
|
||||||
|
|
||||||
# local imports
|
|
||||||
from cic_cache import db
|
|
||||||
from cic_cache.db.models.base import SessionBase
|
|
||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
|
||||||
logg = logging.getLogger()
|
|
||||||
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
|
|
||||||
logging.getLogger('urllib3').setLevel(logging.CRITICAL)
|
|
||||||
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
|
|
||||||
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
|
|
||||||
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)
|
|
||||||
|
|
||||||
log_topics = {
|
|
||||||
'transfer': '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
|
|
||||||
'convert': '0x7154b38b5dd31bb3122436a96d4e09aba5b323ae1fd580025fab55074334c095',
|
|
||||||
'accountregistry_add': '0a3b0a4f4c6e53dce3dbcad5614cb2ba3a0fa7326d03c5d64b4fa2d565492737',
|
|
||||||
}
|
|
||||||
|
|
||||||
config_dir = os.path.join('/usr/local/etc/cic-cache')
|
|
||||||
|
|
||||||
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
|
||||||
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
|
||||||
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
|
||||||
argparser.add_argument('--trust-address', default=[], type=str, dest='trust_address', action='append', help='Set address as trust')
|
|
||||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
|
||||||
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
|
|
||||||
argparser.add_argument('-v', help='be verbose', action='store_true')
|
|
||||||
argparser.add_argument('-vv', help='be more verbose', action='store_true')
|
|
||||||
args = argparser.parse_args(sys.argv[1:])
|
|
||||||
|
|
||||||
config_dir = os.path.join(args.c)
|
|
||||||
os.makedirs(config_dir, 0o777, True)
|
|
||||||
|
|
||||||
|
|
||||||
if args.v == True:
|
|
||||||
logging.getLogger().setLevel(logging.INFO)
|
|
||||||
elif args.vv == True:
|
|
||||||
logging.getLogger().setLevel(logging.DEBUG)
|
|
||||||
|
|
||||||
config = confini.Config(config_dir, args.env_prefix)
|
|
||||||
config.process()
|
|
||||||
args_override = {
|
|
||||||
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
|
|
||||||
'CIC_TRUST_ADDRESS': ",".join(getattr(args, 'trust_address', [])),
|
|
||||||
}
|
|
||||||
config.dict_override(args_override, 'cli flag')
|
|
||||||
config.censor('PASSWORD', 'DATABASE')
|
|
||||||
config.censor('PASSWORD', 'SSL')
|
|
||||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
|
||||||
|
|
||||||
# connect to database
|
|
||||||
dsn = db.dsn_from_config(config)
|
|
||||||
SessionBase.connect(dsn)
|
|
||||||
|
|
||||||
|
|
||||||
re_websocket = re.compile('^wss?://')
|
|
||||||
re_http = re.compile('^https?://')
|
|
||||||
blockchain_provider = config.get('ETH_PROVIDER')
|
|
||||||
if re.match(re_websocket, blockchain_provider) != None:
|
|
||||||
blockchain_provider = WebsocketProvider(blockchain_provider)
|
|
||||||
elif re.match(re_http, blockchain_provider) != None:
|
|
||||||
blockchain_provider = HTTPProvider(blockchain_provider)
|
|
||||||
else:
|
|
||||||
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
|
||||||
|
|
||||||
def web3_constructor():
|
|
||||||
w3 = web3.Web3(blockchain_provider)
|
|
||||||
return (blockchain_provider, w3)
|
|
||||||
|
|
||||||
|
|
||||||
class RunStateEnum(enum.IntEnum):
|
|
||||||
INIT = 0
|
|
||||||
RUN = 1
|
|
||||||
TERMINATE = 9
|
|
||||||
|
|
||||||
|
|
||||||
def rubberstamp(src):
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
class Tracker:
|
|
||||||
|
|
||||||
def __init__(self, chain_spec, trusts=[]):
|
|
||||||
self.block_height = 0
|
|
||||||
self.tx_height = 0
|
|
||||||
self.state = RunStateEnum.INIT
|
|
||||||
self.declarator_cache = {}
|
|
||||||
self.convert_enabled = False
|
|
||||||
self.trusts = trusts
|
|
||||||
self.chain_spec = chain_spec
|
|
||||||
self.declarator = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', 'Declarator')
|
|
||||||
|
|
||||||
|
|
||||||
def __process_tx(self, w3, session, t, r, l, b):
|
|
||||||
token_value = int(l.data, 16)
|
|
||||||
token_sender = l.topics[1][-20:].hex()
|
|
||||||
token_recipient = l.topics[2][-20:].hex()
|
|
||||||
|
|
||||||
#ts = ContractRegistry.get_address(t.address)
|
|
||||||
ts = CICRegistry.get_address(self.chain_spec, t.address())
|
|
||||||
logg.info('add token transfer {} value {} from {} to {}'.format(
|
|
||||||
ts.symbol(),
|
|
||||||
token_value,
|
|
||||||
token_sender,
|
|
||||||
token_recipient,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
db.add_transaction(
|
|
||||||
session,
|
|
||||||
r.transactionHash.hex(),
|
|
||||||
r.blockNumber,
|
|
||||||
r.transactionIndex,
|
|
||||||
w3.toChecksumAddress(token_sender),
|
|
||||||
w3.toChecksumAddress(token_recipient),
|
|
||||||
t.address(),
|
|
||||||
t.address(),
|
|
||||||
token_value,
|
|
||||||
token_value,
|
|
||||||
r.status == 1,
|
|
||||||
b.timestamp,
|
|
||||||
)
|
|
||||||
session.flush()
|
|
||||||
|
|
||||||
|
|
||||||
# TODO: simplify/ split up and/or comment, function is too long
|
|
||||||
def __process_convert(self, w3, session, t, r, l, b):
|
|
||||||
logg.warning('conversions are deactivated')
|
|
||||||
return
|
|
||||||
# token_source = l.topics[2][-20:].hex()
|
|
||||||
# token_source = w3.toChecksumAddress(token_source)
|
|
||||||
# token_destination = l.topics[3][-20:].hex()
|
|
||||||
# token_destination = w3.toChecksumAddress(token_destination)
|
|
||||||
# data_noox = l.data[2:]
|
|
||||||
# d = data_noox[:64]
|
|
||||||
# token_from_value = int(d, 16)
|
|
||||||
# d = data_noox[64:128]
|
|
||||||
# token_to_value = int(d, 16)
|
|
||||||
# token_trader = '0x' + data_noox[192-40:]
|
|
||||||
#
|
|
||||||
# #ts = ContractRegistry.get_address(token_source)
|
|
||||||
# ts = CICRegistry.get_address(CICRegistry.bancor_chain_spec, t.address())
|
|
||||||
# #if ts == None:
|
|
||||||
# # ts = ContractRegistry.reserves[token_source]
|
|
||||||
# td = ContractRegistry.get_address(token_destination)
|
|
||||||
# #if td == None:
|
|
||||||
# # td = ContractRegistry.reserves[token_source]
|
|
||||||
# logg.info('add token convert {} -> {} value {} -> {} trader {}'.format(
|
|
||||||
# ts.symbol(),
|
|
||||||
# td.symbol(),
|
|
||||||
# token_from_value,
|
|
||||||
# token_to_value,
|
|
||||||
# token_trader,
|
|
||||||
# )
|
|
||||||
# )
|
|
||||||
#
|
|
||||||
# db.add_transaction(
|
|
||||||
# session,
|
|
||||||
# r.transactionHash.hex(),
|
|
||||||
# r.blockNumber,
|
|
||||||
# r.transactionIndex,
|
|
||||||
# w3.toChecksumAddress(token_trader),
|
|
||||||
# w3.toChecksumAddress(token_trader),
|
|
||||||
# token_source,
|
|
||||||
# token_destination,
|
|
||||||
# r.status == 1,
|
|
||||||
# b.timestamp,
|
|
||||||
# )
|
|
||||||
# session.flush()
|
|
||||||
|
|
||||||
|
|
||||||
def check_token(self, address):
|
|
||||||
t = None
|
|
||||||
try:
|
|
||||||
t = CICRegistry.get_address(CICRegistry.default_chain_spec, address)
|
|
||||||
return t
|
|
||||||
except UnknownContractError:
|
|
||||||
logg.debug('contract {} not in registry'.format(address))
|
|
||||||
|
|
||||||
# If nothing was returned, we look up the token in the declarator
|
|
||||||
for trust in self.trusts:
|
|
||||||
logg.debug('look up declaration for contract {} with trust {}'.format(address, trust))
|
|
||||||
fn = self.declarator.function('declaration')
|
|
||||||
# TODO: cache trust in LRUcache
|
|
||||||
declaration_array = fn(trust, address).call()
|
|
||||||
try:
|
|
||||||
declaration = to_token_declaration(trust, address, declaration_array, [rubberstamp])
|
|
||||||
logg.debug('found declaration for token {} from trust address {}'.format(address, trust))
|
|
||||||
except UnknownDeclarationError:
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
c = w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=address)
|
|
||||||
t = CICRegistry.add_token(self.chain_spec, c)
|
|
||||||
break
|
|
||||||
except ValueError:
|
|
||||||
logg.error('declaration for {} validates as token, but location is not ERC20 compatible'.format(address))
|
|
||||||
|
|
||||||
return t
|
|
||||||
|
|
||||||
|
|
||||||
# TODO use input data instead of logs
|
|
||||||
def process(self, w3, session, block):
|
|
||||||
#self.refresh_registry(w3)
|
|
||||||
tx_count = w3.eth.getBlockTransactionCount(block.hash)
|
|
||||||
b = w3.eth.getBlock(block.hash)
|
|
||||||
for i in range(self.tx_height, tx_count):
|
|
||||||
tx = w3.eth.getTransactionByBlock(block.hash, i)
|
|
||||||
if tx.to == None:
|
|
||||||
logg.debug('block {} tx {} is contract creation tx, skipping'.format(block.number, i))
|
|
||||||
continue
|
|
||||||
if len(w3.eth.getCode(tx.to)) == 0:
|
|
||||||
logg.debug('block {} tx {} not a contract tx, skipping'.format(block.number, i))
|
|
||||||
continue
|
|
||||||
|
|
||||||
t = self.check_token(tx.to)
|
|
||||||
if t != None and isinstance(t, Token):
|
|
||||||
r = w3.eth.getTransactionReceipt(tx.hash)
|
|
||||||
for l in r.logs:
|
|
||||||
logg.debug('block {} tx {} {} token log {} {}'.format(block.number, i, tx.hash.hex(), l.logIndex, l.topics[0].hex()))
|
|
||||||
if l.topics[0].hex() == log_topics['transfer']:
|
|
||||||
self.__process_tx(w3, session, t, r, l, b)
|
|
||||||
|
|
||||||
# TODO: cache contracts in LRUcache
|
|
||||||
elif self.convert_enabled and tx.to == CICRegistry.get_contract(CICRegistry.default_chain_spec, 'Converter').address:
|
|
||||||
r = w3.eth.getTransactionReceipt(tx.hash)
|
|
||||||
for l in r.logs:
|
|
||||||
logg.info('block {} tx {} {} bancornetwork log {} {}'.format(block.number, i, tx.hash.hex(), l.logIndex, l.topics[0].hex()))
|
|
||||||
if l.topics[0].hex() == log_topics['convert']:
|
|
||||||
self.__process_convert(w3, session, t, r, l, b)
|
|
||||||
|
|
||||||
session.execute("UPDATE tx_sync SET tx = '{}'".format(tx.hash.hex()))
|
|
||||||
session.commit()
|
|
||||||
self.tx_height += 1
|
|
||||||
|
|
||||||
|
|
||||||
def __get_next_retry(self, backoff=False):
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
def loop(self):
|
|
||||||
logg.info('starting at block {} tx index {}'.format(self.block_height, self.tx_height))
|
|
||||||
self.state = RunStateEnum.RUN
|
|
||||||
while self.state == RunStateEnum.RUN:
|
|
||||||
(provider, w3) = web3_constructor()
|
|
||||||
session = SessionBase.create_session()
|
|
||||||
try:
|
|
||||||
block = w3.eth.getBlock(self.block_height)
|
|
||||||
self.process(w3, session, block)
|
|
||||||
self.block_height += 1
|
|
||||||
self.tx_height = 0
|
|
||||||
except BlockNotFound as e:
|
|
||||||
logg.debug('no block {} yet, zZzZ...'.format(self.block_height))
|
|
||||||
time.sleep(self.__get_next_retry())
|
|
||||||
except ConnectionClosedError as e:
|
|
||||||
logg.info('connection gone, retrying')
|
|
||||||
time.sleep(self.__get_next_retry(True))
|
|
||||||
except OSError as e:
|
|
||||||
logg.error('cannot connect {}'.format(e))
|
|
||||||
time.sleep(self.__get_next_retry(True))
|
|
||||||
except Exception as e:
|
|
||||||
session.close()
|
|
||||||
raise(e)
|
|
||||||
session.close()
|
|
||||||
|
|
||||||
|
|
||||||
def load(self, w3):
|
|
||||||
session = SessionBase.create_session()
|
|
||||||
r = session.execute('SELECT tx FROM tx_sync').first()
|
|
||||||
if r != None:
|
|
||||||
if r[0] == '0x{0:0{1}X}'.format(0, 64):
|
|
||||||
logg.debug('last tx was zero-address, starting from scratch')
|
|
||||||
return
|
|
||||||
t = w3.eth.getTransaction(r[0])
|
|
||||||
|
|
||||||
self.block_height = t.blockNumber
|
|
||||||
self.tx_height = t.transactionIndex+1
|
|
||||||
c = w3.eth.getBlockTransactionCount(t.blockHash.hex())
|
|
||||||
logg.debug('last tx processed {} index {} (max index {})'.format(t.blockNumber, t.transactionIndex, c-1))
|
|
||||||
if c == self.tx_height:
|
|
||||||
self.block_height += 1
|
|
||||||
self.tx_height = 0
|
|
||||||
session.close()
|
|
||||||
|
|
||||||
(provider, w3) = web3_constructor()
|
|
||||||
trust = config.get('CIC_TRUST_ADDRESS', "").split(",")
|
|
||||||
chain_spec = args.i
|
|
||||||
|
|
||||||
try:
|
|
||||||
w3.eth.chainId
|
|
||||||
except Exception as e:
|
|
||||||
logg.exception(e)
|
|
||||||
sys.stderr.write('cannot connect to evm node\n')
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
def main():
|
|
||||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
|
||||||
|
|
||||||
CICRegistry.init(w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
|
|
||||||
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
|
|
||||||
chain_registry = ChainRegistry(chain_spec)
|
|
||||||
CICRegistry.add_chain_registry(chain_registry)
|
|
||||||
|
|
||||||
t = Tracker(chain_spec, trust)
|
|
||||||
t.load(w3)
|
|
||||||
t.loop()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
||||||
@@ -2,14 +2,17 @@
|
|||||||
import celery
|
import celery
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_cache.cache import BloomCache
|
from cic_cache.cache import (
|
||||||
|
BloomCache,
|
||||||
|
DataCache,
|
||||||
|
)
|
||||||
from cic_cache.db.models.base import SessionBase
|
from cic_cache.db.models.base import SessionBase
|
||||||
|
|
||||||
celery_app = celery.current_app
|
celery_app = celery.current_app
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task(bind=True)
|
@celery_app.task(bind=True)
|
||||||
def tx_filter(self, offset, limit, address=None, encoding='hex'):
|
def tx_filter(self, offset, limit, address=None, oldest=False, encoding='hex'):
|
||||||
queue = self.request.delivery_info.get('routing_key')
|
queue = self.request.delivery_info.get('routing_key')
|
||||||
|
|
||||||
session = SessionBase.create_session()
|
session = SessionBase.create_session()
|
||||||
@@ -17,9 +20,9 @@ def tx_filter(self, offset, limit, address=None, encoding='hex'):
|
|||||||
c = BloomCache(session)
|
c = BloomCache(session)
|
||||||
b = None
|
b = None
|
||||||
if address == None:
|
if address == None:
|
||||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
|
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit, oldest=oldest)
|
||||||
else:
|
else:
|
||||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
|
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit, oldest=oldest)
|
||||||
|
|
||||||
session.close()
|
session.close()
|
||||||
|
|
||||||
@@ -35,4 +38,17 @@ def tx_filter(self, offset, limit, address=None, encoding='hex'):
|
|||||||
return o
|
return o
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
|
||||||
|
def tx_filter_content(self, offset, limit, address=None, block_offset=None, block_limit=None, oldest=False, encoding='hex'):
|
||||||
|
session = SessionBase.create_session()
|
||||||
|
|
||||||
|
c = DataCache(session)
|
||||||
|
b = None
|
||||||
|
if address == None:
|
||||||
|
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
|
||||||
|
else:
|
||||||
|
(lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data_index(address, offset, limit, block_offset=block_offset, block_limit=block_limit)
|
||||||
|
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
return (lowest_block, highest_block, tx_cache,)
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ import semver
|
|||||||
version = (
|
version = (
|
||||||
0,
|
0,
|
||||||
2,
|
2,
|
||||||
0,
|
1,
|
||||||
'alpha.1',
|
'alpha.2',
|
||||||
)
|
)
|
||||||
|
|
||||||
version_object = semver.VersionInfo(
|
version_object = semver.VersionInfo(
|
||||||
|
|||||||
@@ -1,2 +0,0 @@
|
|||||||
[bancor]
|
|
||||||
dir =
|
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
[cic]
|
[cic]
|
||||||
registry_address =
|
registry_address =
|
||||||
chain_spec =
|
|
||||||
trust_address =
|
trust_address =
|
||||||
|
|||||||
@@ -6,4 +6,4 @@ HOST=localhost
|
|||||||
PORT=5432
|
PORT=5432
|
||||||
ENGINE=postgresql
|
ENGINE=postgresql
|
||||||
DRIVER=psycopg2
|
DRIVER=psycopg2
|
||||||
DEBUG=
|
DEBUG=0
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
[bancor]
|
|
||||||
registry_address =
|
|
||||||
dir = /usr/local/share/bancor
|
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
[cic]
|
[cic]
|
||||||
registry_address =
|
registry_address =
|
||||||
chain_spec = evm:bloxberg:8996
|
|
||||||
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
|
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
|
||||||
@@ -6,4 +6,4 @@ HOST=localhost
|
|||||||
PORT=63432
|
PORT=63432
|
||||||
ENGINE=postgresql
|
ENGINE=postgresql
|
||||||
DRIVER=psycopg2
|
DRIVER=psycopg2
|
||||||
DEBUG=1
|
DEBUG=0
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
[eth]
|
|
||||||
provider = ws://localhost:63546
|
|
||||||
chain_id = 8996
|
|
||||||
4
apps/cic-cache/config/docker/syncer.ini
Normal file
4
apps/cic-cache/config/docker/syncer.ini
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
[syncer]
|
||||||
|
loop_interval = 1
|
||||||
|
offset = 0
|
||||||
|
no_history = 0
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
[eth]
|
|
||||||
provider = ws://localhost:8545
|
|
||||||
#ttp_provider = http://localhost:8545
|
|
||||||
#provider = http://localhost:8545
|
|
||||||
gas_provider_address =
|
|
||||||
#chain_id =
|
|
||||||
abi_dir = /usr/local/share/cic/solidity/abi
|
|
||||||
@@ -1,2 +1,4 @@
|
|||||||
[cic]
|
[cic]
|
||||||
registry_address =
|
registry_address =
|
||||||
|
chain_spec =
|
||||||
|
trust_address =
|
||||||
|
|||||||
@@ -6,4 +6,4 @@ HOST=localhost
|
|||||||
PORT=5432
|
PORT=5432
|
||||||
ENGINE=sqlite
|
ENGINE=sqlite
|
||||||
DRIVER=pysqlite
|
DRIVER=pysqlite
|
||||||
DEBUG=
|
DEBUG=1
|
||||||
|
|||||||
@@ -1,2 +1,2 @@
|
|||||||
[SYNCER]
|
[syncer]
|
||||||
loop_interval = 1
|
loop_interval = 1
|
||||||
@@ -1,53 +1,39 @@
|
|||||||
FROM python:3.8.6-slim-buster
|
# syntax = docker/dockerfile:1.2
|
||||||
|
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||||
|
|
||||||
|
# RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
|
||||||
|
|
||||||
#COPY --from=0 /usr/local/share/cic/solidity/ /usr/local/share/cic/solidity/
|
COPY requirements.txt .
|
||||||
|
#RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
|
||||||
|
#RUN pip install $pip_extra_index_url_flag .
|
||||||
|
#RUN pip install .[server]
|
||||||
|
|
||||||
WORKDIR /usr/src/cic-cache
|
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||||
|
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||||
|
ARG EXTRA_PIP_ARGS=""
|
||||||
|
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||||
|
pip install --index-url https://pypi.org/simple \
|
||||||
|
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
|
||||||
|
-r requirements.txt
|
||||||
|
|
||||||
ARG pip_extra_index_url_flag='--index https://pypi.org/simple --extra-index-url https://pip.grassrootseconomics.net:8433'
|
COPY . .
|
||||||
ARG root_requirement_file='requirements.txt'
|
|
||||||
|
|
||||||
#RUN apk update && \
|
RUN python setup.py install
|
||||||
# apk add gcc musl-dev gnupg libpq
|
|
||||||
#RUN apk add postgresql-dev
|
|
||||||
#RUN apk add linux-headers
|
|
||||||
#RUN apk add libffi-dev
|
|
||||||
RUN apt-get update && \
|
|
||||||
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git
|
|
||||||
|
|
||||||
# Copy shared requirements from top of mono-repo
|
|
||||||
RUN echo "copying root req file ${root_requirement_file}"
|
|
||||||
RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a44
|
|
||||||
|
|
||||||
COPY cic-cache/requirements.txt ./
|
|
||||||
COPY cic-cache/setup.cfg \
|
|
||||||
cic-cache/setup.py \
|
|
||||||
./
|
|
||||||
COPY cic-cache/cic_cache/ ./cic_cache/
|
|
||||||
COPY cic-cache/scripts/ ./scripts/
|
|
||||||
COPY cic-cache/test_requirements.txt ./
|
|
||||||
RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
|
|
||||||
RUN pip install $pip_extra_index_url_flag .
|
|
||||||
RUN pip install .[server]
|
|
||||||
|
|
||||||
COPY cic-cache/tests/ ./tests/
|
|
||||||
#COPY db/ cic-cache/db
|
|
||||||
#RUN apk add postgresql-client
|
|
||||||
|
|
||||||
# ini files in config directory defines the configurable parameters for the application
|
# ini files in config directory defines the configurable parameters for the application
|
||||||
# they can all be overridden by environment variables
|
# they can all be overridden by environment variables
|
||||||
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||||
COPY cic-cache/config/ /usr/local/etc/cic-cache/
|
COPY config/ /usr/local/etc/cic-cache/
|
||||||
|
|
||||||
# for db migrations
|
# for db migrations
|
||||||
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
|
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
|
||||||
COPY cic-cache/cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
|
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
|
||||||
|
|
||||||
RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
|
|
||||||
mkdir -p /usr/local/share/cic/solidity && \
|
|
||||||
cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
|
|
||||||
|
|
||||||
|
COPY /docker/start_tracker.sh ./start_tracker.sh
|
||||||
|
COPY /docker/db.sh ./db.sh
|
||||||
|
RUN chmod 755 ./*.sh
|
||||||
# Tracker
|
# Tracker
|
||||||
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
|
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
|
||||||
# Server
|
# Server
|
||||||
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
|
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
|
||||||
|
ENTRYPOINT []
|
||||||
|
|||||||
6
apps/cic-cache/docker/db.sh
Normal file
6
apps/cic-cache/docker/db.sh
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
>&2 echo executing database migration
|
||||||
|
python scripts/migrate.py -c /usr/local/etc/cic-cache --migrations-dir /usr/local/share/cic-cache/alembic -vv
|
||||||
|
set +e
|
||||||
10
apps/cic-cache/docker/run_tests.sh
Normal file
10
apps/cic-cache/docker/run_tests.sh
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
#! /bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
|
||||||
|
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
|
||||||
|
-r test_requirements.txt
|
||||||
|
|
||||||
|
export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests
|
||||||
|
|
||||||
10
apps/cic-cache/docker/start_tracker.sh
Normal file
10
apps/cic-cache/docker/start_tracker.sh
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
. ./db.sh
|
||||||
|
|
||||||
|
if [ $? -ne "0" ]; then
|
||||||
|
>&2 echo db migrate fail
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
/usr/local/bin/cic-cache-trackerd $@
|
||||||
@@ -1,10 +1,14 @@
|
|||||||
alembic==1.4.2
|
alembic==1.4.2
|
||||||
confini~=0.3.6b2
|
confini>=0.3.6rc4,<0.5.0
|
||||||
uwsgi==2.0.19.1
|
uwsgi==2.0.19.1
|
||||||
moolb~=0.1.0
|
moolb~=0.1.1b2
|
||||||
cic-registry~=0.5.3a4
|
cic-eth-registry~=0.6.1a1
|
||||||
SQLAlchemy==1.3.20
|
SQLAlchemy==1.3.20
|
||||||
semver==2.13.0
|
semver==2.13.0
|
||||||
psycopg2==2.8.6
|
psycopg2==2.8.6
|
||||||
celery==4.4.7
|
celery==4.4.7
|
||||||
redis==3.5.3
|
redis==3.5.3
|
||||||
|
chainsyncer[sql]>=0.0.6a3,<0.1.0
|
||||||
|
erc20-faucet>=0.3.2a2, <0.4.0
|
||||||
|
chainlib-eth>=0.0.9a14,<0.1.0
|
||||||
|
eth-address-index>=0.2.3a4,<0.3.0
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
import os
|
import os
|
||||||
import argparse
|
import argparse
|
||||||
import logging
|
import logging
|
||||||
|
import re
|
||||||
|
|
||||||
import alembic
|
import alembic
|
||||||
from alembic.config import Config as AlembicConfig
|
from alembic.config import Config as AlembicConfig
|
||||||
@@ -23,6 +24,8 @@ argparser = argparse.ArgumentParser()
|
|||||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
|
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
|
||||||
|
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
|
||||||
|
argparser.add_argument('-f', action='store_true', help='force action')
|
||||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||||
args = argparser.parse_args()
|
args = argparser.parse_args()
|
||||||
@@ -53,4 +56,10 @@ ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
|
|||||||
ac.set_main_option('sqlalchemy.url', dsn)
|
ac.set_main_option('sqlalchemy.url', dsn)
|
||||||
ac.set_main_option('script_location', migrations_dir)
|
ac.set_main_option('script_location', migrations_dir)
|
||||||
|
|
||||||
|
if args.reset:
|
||||||
|
if not args.f:
|
||||||
|
if not re.match(r'[yY][eE]?[sS]?', input('EEK! this will DELETE the existing db. are you sure??')):
|
||||||
|
logg.error('user chickened out on requested reset, bailing')
|
||||||
|
sys.exit(1)
|
||||||
|
alembic.command.downgrade(ac, 'base')
|
||||||
alembic.command.upgrade(ac, 'head')
|
alembic.command.upgrade(ac, 'head')
|
||||||
|
|||||||
@@ -23,17 +23,22 @@ licence_files =
|
|||||||
|
|
||||||
[options]
|
[options]
|
||||||
python_requires = >= 3.6
|
python_requires = >= 3.6
|
||||||
|
include_package_data = True
|
||||||
packages =
|
packages =
|
||||||
cic_cache
|
cic_cache
|
||||||
cic_cache.tasks
|
cic_cache.tasks
|
||||||
cic_cache.db
|
cic_cache.db
|
||||||
cic_cache.db.models
|
cic_cache.db.models
|
||||||
|
cic_cache.cli
|
||||||
cic_cache.runnable
|
cic_cache.runnable
|
||||||
|
cic_cache.runnable.daemons
|
||||||
|
cic_cache.runnable.daemons.filters
|
||||||
scripts =
|
scripts =
|
||||||
./scripts/migrate.py
|
./scripts/migrate.py
|
||||||
|
|
||||||
[options.entry_points]
|
[options.entry_points]
|
||||||
console_scripts =
|
console_scripts =
|
||||||
cic-cache-trackerd = cic_cache.runnable.tracker:main
|
cic-cache-trackerd = cic_cache.runnable.daemons.tracker:main
|
||||||
cic-cache-serverd = cic_cache.runnable.server:main
|
cic-cache-serverd = cic_cache.runnable.daemons.server:main
|
||||||
cic-cache-taskerd = cic_cache.runnable.tasker:main
|
cic-cache-taskerd = cic_cache.runnable.daemons.tasker:main
|
||||||
|
cic-cache-list = cic_cache.runable.list:main
|
||||||
|
|||||||
@@ -4,3 +4,7 @@ pytest-mock==3.3.1
|
|||||||
pysqlite3==0.4.3
|
pysqlite3==0.4.3
|
||||||
sqlparse==0.4.1
|
sqlparse==0.4.1
|
||||||
pytest-celery==0.0.0a1
|
pytest-celery==0.0.0a1
|
||||||
|
eth_tester==0.5.0b3
|
||||||
|
py-evm==0.3.0a20
|
||||||
|
sarafu-faucet~=0.0.7a1
|
||||||
|
erc20-transfer-authorization>=0.3.5a1,<0.4.0
|
||||||
|
|||||||
40
apps/cic-cache/tests/cli/test_cli_args.py
Normal file
40
apps/cic-cache/tests/cli/test_cli_args.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import chainlib.cli
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
import cic_cache.cli
|
||||||
|
|
||||||
|
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
config_dir = os.path.join(script_dir, '..', 'testdata', 'config')
|
||||||
|
|
||||||
|
|
||||||
|
def test_argumentparserto_config():
|
||||||
|
|
||||||
|
argparser = cic_cache.cli.ArgumentParser()
|
||||||
|
|
||||||
|
local_flags = 0xffff
|
||||||
|
argparser.process_local_flags(local_flags)
|
||||||
|
argparser.add_argument('--foo', type=str)
|
||||||
|
args = argparser.parse_args([
|
||||||
|
'-q', 'baz',
|
||||||
|
'--offset', '13',
|
||||||
|
'--no-history',
|
||||||
|
'-r','0xdeadbeef',
|
||||||
|
'-vv',
|
||||||
|
'--foo', 'bar',
|
||||||
|
])
|
||||||
|
|
||||||
|
extra_args = {
|
||||||
|
'foo': '_BARBARBAR',
|
||||||
|
}
|
||||||
|
config = cic_cache.cli.Config.from_args(args, chainlib.cli.argflag_std_base, local_flags, extra_args=extra_args, base_config_dir=config_dir)
|
||||||
|
|
||||||
|
assert config.get('_BARBARBAR') == 'bar'
|
||||||
|
assert config.get('CELERY_QUEUE') == 'baz'
|
||||||
|
assert config.get('SYNCER_NO_HISTORY') == True
|
||||||
|
assert config.get('SYNCER_OFFSET') == 13
|
||||||
|
assert config.get('CIC_REGISTRY_ADDRESS') == '0xdeadbeef'
|
||||||
|
|
||||||
17
apps/cic-cache/tests/cli/test_cli_celery.py
Normal file
17
apps/cic-cache/tests/cli/test_cli_celery.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
# standard imports
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
import cic_cache.cli
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_celery():
|
||||||
|
cf = tempfile.mkdtemp()
|
||||||
|
|
||||||
|
config = {
|
||||||
|
'CELERY_RESULT_URL': 'filesystem://' + cf,
|
||||||
|
}
|
||||||
|
cic_cache.cli.CeleryApp.from_config(config)
|
||||||
|
|
||||||
|
config['CELERY_BROKER_URL'] = 'filesystem://' + cf
|
||||||
|
cic_cache.cli.CeleryApp.from_config(config)
|
||||||
68
apps/cic-cache/tests/cli/test_cli_chain.py
Normal file
68
apps/cic-cache/tests/cli/test_cli_chain.py
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
# external imports
|
||||||
|
import pytest
|
||||||
|
from chainlib.eth.gas import (
|
||||||
|
Gas,
|
||||||
|
RPCGasOracle,
|
||||||
|
)
|
||||||
|
from chainlib.eth.nonce import RPCNonceOracle
|
||||||
|
from chainlib.eth.block import (
|
||||||
|
block_latest,
|
||||||
|
Block,
|
||||||
|
)
|
||||||
|
from chainlib.eth.pytest.fixtures_chain import default_chain_spec
|
||||||
|
from chainlib.eth.pytest.fixtures_ethtester import *
|
||||||
|
from cic_eth_registry.pytest.fixtures_contracts import *
|
||||||
|
from hexathon import add_0x
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
import cic_cache.cli
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.xfail()
|
||||||
|
def test_cli_rpc(
|
||||||
|
eth_rpc,
|
||||||
|
eth_signer,
|
||||||
|
default_chain_spec,
|
||||||
|
):
|
||||||
|
config = {
|
||||||
|
'CHAIN_SPEC': str(default_chain_spec),
|
||||||
|
'RPC_HTTP_PROVIDER': 'http://localhost:8545',
|
||||||
|
}
|
||||||
|
rpc = cic_cache.cli.RPC.from_config(config, default_label='foo')
|
||||||
|
conn = rpc.get_by_label('foo')
|
||||||
|
#o = block_latest()
|
||||||
|
#conn.do(o)
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_chain(
|
||||||
|
default_chain_spec,
|
||||||
|
eth_rpc,
|
||||||
|
eth_signer,
|
||||||
|
contract_roles,
|
||||||
|
):
|
||||||
|
ifc = cic_cache.cli.EthChainInterface()
|
||||||
|
|
||||||
|
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
|
||||||
|
gas_oracle = RPCGasOracle(conn=eth_rpc)
|
||||||
|
c = Gas(default_chain_spec, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, signer=eth_signer)
|
||||||
|
recipient = add_0x(os.urandom(20).hex())
|
||||||
|
(tx_hash, o) = c.create(contract_roles['CONTRACT_DEPLOYER'], recipient, 1024)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
|
||||||
|
o = ifc.tx_receipt(r)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
assert r['status'] == 1
|
||||||
|
|
||||||
|
o = ifc.block_by_number(1)
|
||||||
|
block_src = eth_rpc.do(o)
|
||||||
|
block = ifc.block_from_src(block_src)
|
||||||
|
assert block.number == 1
|
||||||
|
|
||||||
|
with pytest.raises(KeyError):
|
||||||
|
assert block_src['gasUsed'] == 21000
|
||||||
|
assert block_src['gas_used'] == 21000
|
||||||
|
|
||||||
|
block_src = ifc.src_normalize(block_src)
|
||||||
|
assert block_src['gasUsed'] == 21000
|
||||||
|
assert block_src['gas_used'] == 21000
|
||||||
|
|
||||||
@@ -3,11 +3,14 @@ import os
|
|||||||
import sys
|
import sys
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import pytest
|
import pytest
|
||||||
|
import moolb
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_cache import db
|
from cic_cache import db
|
||||||
|
from cic_cache import BloomCache
|
||||||
|
from cic_cache.cache import DEFAULT_FILTER_SIZE
|
||||||
|
|
||||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||||
root_dir = os.path.dirname(script_dir)
|
root_dir = os.path.dirname(script_dir)
|
||||||
@@ -61,7 +64,6 @@ def txs(
|
|||||||
dt.timestamp(),
|
dt.timestamp(),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
tx_number = 42
|
tx_number = 42
|
||||||
tx_hash_second = '0x' + os.urandom(32).hex()
|
tx_hash_second = '0x' + os.urandom(32).hex()
|
||||||
tx_signed_second = '0x' + os.urandom(128).hex()
|
tx_signed_second = '0x' + os.urandom(128).hex()
|
||||||
@@ -84,3 +86,62 @@ def txs(
|
|||||||
|
|
||||||
session.commit()
|
session.commit()
|
||||||
|
|
||||||
|
return [
|
||||||
|
tx_hash_first,
|
||||||
|
tx_hash_second,
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def more_txs(
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
list_tokens,
|
||||||
|
txs,
|
||||||
|
):
|
||||||
|
|
||||||
|
session = init_database
|
||||||
|
|
||||||
|
tx_number = 666
|
||||||
|
tx_hash = '0x' + os.urandom(32).hex()
|
||||||
|
tx_signed = '0x' + os.urandom(128).hex()
|
||||||
|
nonce = 3
|
||||||
|
|
||||||
|
dt = datetime.datetime.utcnow()
|
||||||
|
dt += datetime.timedelta(hours=1)
|
||||||
|
db.add_transaction(
|
||||||
|
session,
|
||||||
|
tx_hash,
|
||||||
|
list_defaults['block']+2,
|
||||||
|
tx_number,
|
||||||
|
list_actors['alice'],
|
||||||
|
list_actors['diane'],
|
||||||
|
list_tokens['bar'],
|
||||||
|
list_tokens['bar'],
|
||||||
|
2048,
|
||||||
|
4096,
|
||||||
|
False,
|
||||||
|
dt.timestamp(),
|
||||||
|
)
|
||||||
|
|
||||||
|
session.commit()
|
||||||
|
|
||||||
|
return [tx_hash] + txs
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def tag_txs(
|
||||||
|
init_database,
|
||||||
|
txs,
|
||||||
|
):
|
||||||
|
|
||||||
|
db.add_tag(init_database, 'taag', domain='test')
|
||||||
|
init_database.commit()
|
||||||
|
|
||||||
|
db.tag_transaction(init_database, txs[1], 'taag', domain='test')
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='session')
|
||||||
|
def zero_filter():
|
||||||
|
return moolb.Bloom(DEFAULT_FILTER_SIZE, 3)
|
||||||
|
|||||||
3
apps/cic-cache/tests/filters/conftest.py
Normal file
3
apps/cic-cache/tests/filters/conftest.py
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
from chainlib.eth.pytest import *
|
||||||
|
from cic_eth_registry.pytest.fixtures_tokens import *
|
||||||
|
|
||||||
173
apps/cic-cache/tests/filters/test_erc20.py
Normal file
173
apps/cic-cache/tests/filters/test_erc20.py
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import datetime
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import pytest
|
||||||
|
from sqlalchemy import text
|
||||||
|
from chainlib.eth.tx import Tx
|
||||||
|
from chainlib.eth.block import Block
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
from chainlib.eth.error import RequestMismatchException
|
||||||
|
from hexathon import (
|
||||||
|
strip_0x,
|
||||||
|
add_0x,
|
||||||
|
)
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_cache.db import add_tag
|
||||||
|
from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
|
||||||
|
from cic_cache.runnable.daemons.filters.base import TagSyncFilter
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def test_base_filter_str(
|
||||||
|
init_database,
|
||||||
|
):
|
||||||
|
f = TagSyncFilter('foo')
|
||||||
|
assert 'foo' == str(f)
|
||||||
|
f = TagSyncFilter('foo', domain='bar')
|
||||||
|
assert 'bar.foo' == str(f)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def test_erc20_filter(
|
||||||
|
eth_rpc,
|
||||||
|
foo_token,
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
tags,
|
||||||
|
):
|
||||||
|
|
||||||
|
chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
|
||||||
|
|
||||||
|
fltr = ERC20TransferFilter(chain_spec)
|
||||||
|
|
||||||
|
add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
|
||||||
|
|
||||||
|
data = 'a9059cbb'
|
||||||
|
data += strip_0x(list_actors['alice'])
|
||||||
|
data += '1000'.ljust(64, '0')
|
||||||
|
|
||||||
|
block = Block({
|
||||||
|
'hash': os.urandom(32).hex(),
|
||||||
|
'number': 42,
|
||||||
|
'timestamp': datetime.datetime.utcnow().timestamp(),
|
||||||
|
'transactions': [],
|
||||||
|
})
|
||||||
|
|
||||||
|
tx = Tx({
|
||||||
|
'to': foo_token,
|
||||||
|
'from': list_actors['bob'],
|
||||||
|
'data': data,
|
||||||
|
'value': 0,
|
||||||
|
'hash': os.urandom(32).hex(),
|
||||||
|
'nonce': 13,
|
||||||
|
'gasPrice': 10000000,
|
||||||
|
'gas': 123456,
|
||||||
|
})
|
||||||
|
block.txs.append(tx)
|
||||||
|
tx.block = block
|
||||||
|
|
||||||
|
r = fltr.filter(eth_rpc, block, tx, db_session=init_database)
|
||||||
|
assert r
|
||||||
|
|
||||||
|
s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
|
||||||
|
r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
|
||||||
|
assert r[0] == tx.hash
|
||||||
|
|
||||||
|
|
||||||
|
def test_erc20_filter_nocontract(
|
||||||
|
eth_rpc,
|
||||||
|
foo_token,
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
tags,
|
||||||
|
):
|
||||||
|
|
||||||
|
chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
|
||||||
|
|
||||||
|
fltr = ERC20TransferFilter(chain_spec)
|
||||||
|
add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
|
||||||
|
|
||||||
|
# incomplete args
|
||||||
|
data = 'a9059cbb'
|
||||||
|
data += strip_0x(list_actors['alice'])
|
||||||
|
data += '1000'.ljust(64, '0')
|
||||||
|
block = Block({
|
||||||
|
'hash': os.urandom(32).hex(),
|
||||||
|
'number': 42,
|
||||||
|
'timestamp': datetime.datetime.utcnow().timestamp(),
|
||||||
|
'transactions': [],
|
||||||
|
})
|
||||||
|
|
||||||
|
tx = Tx({
|
||||||
|
'to': os.urandom(20).hex(),
|
||||||
|
'from': list_actors['bob'],
|
||||||
|
'data': data,
|
||||||
|
'value': 0,
|
||||||
|
'hash': os.urandom(32).hex(),
|
||||||
|
'nonce': 13,
|
||||||
|
'gasPrice': 10000000,
|
||||||
|
'gas': 123456,
|
||||||
|
})
|
||||||
|
block.txs.append(tx)
|
||||||
|
tx.block = block
|
||||||
|
|
||||||
|
assert not fltr.filter(eth_rpc, block, tx, db_session=init_database)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'contract_method,contract_input,expected_exception',
|
||||||
|
[
|
||||||
|
('a9059cbb', os.urandom(32).hex(), ValueError), # not enough args
|
||||||
|
('a9059cbb', os.urandom(31).hex(), ValueError), # wrong arg boundary
|
||||||
|
('a9059cbc', os.urandom(64).hex(), RequestMismatchException), # wrong method
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_erc20_filter_bogus(
|
||||||
|
eth_rpc,
|
||||||
|
foo_token,
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
tags,
|
||||||
|
contract_method,
|
||||||
|
contract_input,
|
||||||
|
expected_exception,
|
||||||
|
):
|
||||||
|
|
||||||
|
chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
|
||||||
|
|
||||||
|
fltr = ERC20TransferFilter(chain_spec)
|
||||||
|
add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
|
||||||
|
|
||||||
|
# incomplete args
|
||||||
|
data = contract_method
|
||||||
|
data += contract_input
|
||||||
|
block = Block({
|
||||||
|
'hash': os.urandom(32).hex(),
|
||||||
|
'number': 42,
|
||||||
|
'timestamp': datetime.datetime.utcnow().timestamp(),
|
||||||
|
'transactions': [],
|
||||||
|
})
|
||||||
|
|
||||||
|
tx = Tx({
|
||||||
|
'to': foo_token,
|
||||||
|
'from': list_actors['bob'],
|
||||||
|
'data': data,
|
||||||
|
'value': 0,
|
||||||
|
'hash': os.urandom(32).hex(),
|
||||||
|
'nonce': 13,
|
||||||
|
'gasPrice': 10000000,
|
||||||
|
'gas': 123456,
|
||||||
|
})
|
||||||
|
block.txs.append(tx)
|
||||||
|
tx.block = block
|
||||||
|
|
||||||
|
assert not fltr.filter(eth_rpc, block, tx, db_session=init_database)
|
||||||
71
apps/cic-cache/tests/filters/test_faucet.py
Normal file
71
apps/cic-cache/tests/filters/test_faucet.py
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
from chainlib.eth.nonce import RPCNonceOracle
|
||||||
|
from chainlib.eth.block import (
|
||||||
|
block_by_hash,
|
||||||
|
Block,
|
||||||
|
)
|
||||||
|
from chainlib.eth.tx import (
|
||||||
|
receipt,
|
||||||
|
unpack,
|
||||||
|
transaction,
|
||||||
|
Tx,
|
||||||
|
)
|
||||||
|
from hexathon import strip_0x
|
||||||
|
from erc20_faucet.faucet import SingleShotFaucet
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_cache.db import add_tag
|
||||||
|
from cic_cache.runnable.daemons.filters.faucet import FaucetFilter
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def test_filter_faucet(
|
||||||
|
eth_rpc,
|
||||||
|
eth_signer,
|
||||||
|
foo_token,
|
||||||
|
faucet_noregistry,
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
contract_roles,
|
||||||
|
agent_roles,
|
||||||
|
tags,
|
||||||
|
):
|
||||||
|
|
||||||
|
chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
|
||||||
|
|
||||||
|
fltr = FaucetFilter(chain_spec, contract_roles['CONTRACT_DEPLOYER'])
|
||||||
|
|
||||||
|
add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
|
||||||
|
|
||||||
|
nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc)
|
||||||
|
c = SingleShotFaucet(chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
|
||||||
|
(tx_hash_hex, o) = c.give_to(faucet_noregistry, agent_roles['ALICE'], agent_roles['ALICE'])
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
|
||||||
|
tx_src = unpack(bytes.fromhex(strip_0x(o['params'][0])), chain_spec)
|
||||||
|
|
||||||
|
o = receipt(r)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
rcpt = Tx.src_normalize(r)
|
||||||
|
|
||||||
|
assert r['status'] == 1
|
||||||
|
|
||||||
|
o = block_by_hash(r['block_hash'])
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
block_object = Block(r)
|
||||||
|
|
||||||
|
tx = Tx(tx_src, block_object)
|
||||||
|
tx.apply_receipt(rcpt)
|
||||||
|
|
||||||
|
r = fltr.filter(eth_rpc, block_object, tx, init_database)
|
||||||
|
assert r
|
||||||
|
|
||||||
|
s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
|
||||||
|
r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
|
||||||
|
assert r[0] == tx.hash
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
import os
|
import os
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import pytest
|
import pytest
|
||||||
import confini
|
import confini
|
||||||
|
|
||||||
@@ -13,7 +13,7 @@ logg = logging.getLogger(__file__)
|
|||||||
|
|
||||||
@pytest.fixture(scope='session')
|
@pytest.fixture(scope='session')
|
||||||
def load_config():
|
def load_config():
|
||||||
config_dir = os.path.join(root_dir, '.config/test')
|
config_dir = os.path.join(root_dir, 'config/test')
|
||||||
conf = confini.Config(config_dir, 'CICTEST')
|
conf = confini.Config(config_dir, 'CICTEST')
|
||||||
conf.process()
|
conf.process()
|
||||||
logg.debug('config {}'.format(conf))
|
logg.debug('config {}'.format(conf))
|
||||||
|
|||||||
@@ -3,13 +3,16 @@ import os
|
|||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import pytest
|
import pytest
|
||||||
import sqlparse
|
import sqlparse
|
||||||
|
import alembic
|
||||||
|
from alembic.config import Config as AlembicConfig
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_cache.db.models.base import SessionBase
|
from cic_cache.db.models.base import SessionBase
|
||||||
from cic_cache.db import dsn_from_config
|
from cic_cache.db import dsn_from_config
|
||||||
|
from cic_cache.db import add_tag
|
||||||
|
|
||||||
logg = logging.getLogger(__file__)
|
logg = logging.getLogger(__file__)
|
||||||
|
|
||||||
@@ -26,11 +29,10 @@ def database_engine(
|
|||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
pass
|
pass
|
||||||
dsn = dsn_from_config(load_config)
|
dsn = dsn_from_config(load_config)
|
||||||
SessionBase.connect(dsn)
|
SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
|
||||||
return dsn
|
return dsn
|
||||||
|
|
||||||
|
|
||||||
# TODO: use alembic instead to migrate db, here we have to keep separate schema than migration script in script/migrate.py
|
|
||||||
@pytest.fixture(scope='function')
|
@pytest.fixture(scope='function')
|
||||||
def init_database(
|
def init_database(
|
||||||
load_config,
|
load_config,
|
||||||
@@ -38,52 +40,23 @@ def init_database(
|
|||||||
):
|
):
|
||||||
|
|
||||||
rootdir = os.path.dirname(os.path.dirname(__file__))
|
rootdir = os.path.dirname(os.path.dirname(__file__))
|
||||||
schemadir = os.path.join(rootdir, 'db', load_config.get('DATABASE_DRIVER'))
|
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
|
||||||
|
migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
|
||||||
if load_config.get('DATABASE_ENGINE') == 'sqlite':
|
if not os.path.isdir(migrationsdir):
|
||||||
rconn = SessionBase.engine.raw_connection()
|
migrationsdir = os.path.join(dbdir, 'migrations', 'default')
|
||||||
f = open(os.path.join(schemadir, 'db.sql'))
|
logg.info('using migrations directory {}'.format(migrationsdir))
|
||||||
s = f.read()
|
|
||||||
f.close()
|
|
||||||
rconn.executescript(s)
|
|
||||||
|
|
||||||
else:
|
|
||||||
rconn = SessionBase.engine.raw_connection()
|
|
||||||
rcursor = rconn.cursor()
|
|
||||||
|
|
||||||
#rcursor.execute('DROP FUNCTION IF EXISTS public.transaction_list')
|
|
||||||
#rcursor.execute('DROP FUNCTION IF EXISTS public.balances')
|
|
||||||
|
|
||||||
f = open(os.path.join(schemadir, 'db.sql'))
|
|
||||||
s = f.read()
|
|
||||||
f.close()
|
|
||||||
r = re.compile(r'^[A-Z]', re.MULTILINE)
|
|
||||||
for l in sqlparse.parse(s):
|
|
||||||
strl = str(l)
|
|
||||||
# we need to check for empty query lines, as sqlparse doesn't do that on its own (and psycopg complains when it gets them)
|
|
||||||
if not re.search(r, strl):
|
|
||||||
logg.warning('skipping parsed query line {}'.format(strl))
|
|
||||||
continue
|
|
||||||
rcursor.execute(strl)
|
|
||||||
rconn.commit()
|
|
||||||
|
|
||||||
rcursor.execute('SET search_path TO public')
|
|
||||||
|
|
||||||
# this doesn't work when run separately, no idea why
|
|
||||||
# functions have been manually added to original schema from cic-eth
|
|
||||||
# f = open(os.path.join(schemadir, 'proc_transaction_list.sql'))
|
|
||||||
# s = f.read()
|
|
||||||
# f.close()
|
|
||||||
# rcursor.execute(s)
|
|
||||||
#
|
|
||||||
# f = open(os.path.join(schemadir, 'proc_balances.sql'))
|
|
||||||
# s = f.read()
|
|
||||||
# f.close()
|
|
||||||
# rcursor.execute(s)
|
|
||||||
|
|
||||||
rcursor.close()
|
|
||||||
|
|
||||||
session = SessionBase.create_session()
|
session = SessionBase.create_session()
|
||||||
|
|
||||||
|
ac = AlembicConfig(os.path.join(migrationsdir, 'alembic.ini'))
|
||||||
|
ac.set_main_option('sqlalchemy.url', database_engine)
|
||||||
|
ac.set_main_option('script_location', migrationsdir)
|
||||||
|
|
||||||
|
alembic.command.downgrade(ac, 'base')
|
||||||
|
alembic.command.upgrade(ac, 'head')
|
||||||
|
|
||||||
|
session.commit()
|
||||||
|
|
||||||
yield session
|
yield session
|
||||||
session.commit()
|
session.commit()
|
||||||
session.close()
|
session.close()
|
||||||
@@ -116,3 +89,14 @@ def list_defaults(
|
|||||||
return {
|
return {
|
||||||
'block': 420000,
|
'block': 420000,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def tags(
|
||||||
|
init_database,
|
||||||
|
):
|
||||||
|
|
||||||
|
add_tag(init_database, 'foo')
|
||||||
|
add_tag(init_database, 'baz', domain='bar')
|
||||||
|
add_tag(init_database, 'xyzzy', domain='bar')
|
||||||
|
init_database.commit()
|
||||||
|
|||||||
31
apps/cic-cache/tests/test_api.py
Normal file
31
apps/cic-cache/tests/test_api.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# standard imports
|
||||||
|
import json
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_cache.runnable.daemons.query import process_transactions_all_data
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_all_data(
|
||||||
|
init_database,
|
||||||
|
txs,
|
||||||
|
):
|
||||||
|
|
||||||
|
env = {
|
||||||
|
'PATH_INFO': '/txa/410000/420000',
|
||||||
|
'HTTP_X_CIC_CACHE_MODE': 'all',
|
||||||
|
}
|
||||||
|
j = process_transactions_all_data(init_database, env)
|
||||||
|
o = json.loads(j[1])
|
||||||
|
|
||||||
|
assert len(o['data']) == 2
|
||||||
|
|
||||||
|
env = {
|
||||||
|
'PATH_INFO': '/txa/420000/410000',
|
||||||
|
'HTTP_X_CIC_CACHE_MODE': 'all',
|
||||||
|
}
|
||||||
|
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
j = process_transactions_all_data(init_database, env)
|
||||||
@@ -4,11 +4,13 @@ import datetime
|
|||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
|
from cic_cache import db
|
||||||
from cic_cache import BloomCache
|
from cic_cache import BloomCache
|
||||||
|
from cic_cache.cache import DataCache
|
||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
@@ -17,7 +19,6 @@ def test_cache(
|
|||||||
init_database,
|
init_database,
|
||||||
list_defaults,
|
list_defaults,
|
||||||
list_actors,
|
list_actors,
|
||||||
list_tokens,
|
|
||||||
txs,
|
txs,
|
||||||
):
|
):
|
||||||
|
|
||||||
@@ -33,3 +34,219 @@ def test_cache(
|
|||||||
|
|
||||||
assert b[0] == list_defaults['block'] - 1
|
assert b[0] == list_defaults['block'] - 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_cache_data(
|
||||||
|
init_database,
|
||||||
|
txs,
|
||||||
|
tag_txs,
|
||||||
|
):
|
||||||
|
|
||||||
|
session = init_database
|
||||||
|
|
||||||
|
c = DataCache(session)
|
||||||
|
b = c.load_transactions_with_data(0, 3) #410000, 420000) #, 100, block_offset=410000, block_limit=420000, oldest=True)
|
||||||
|
|
||||||
|
assert len(b[2]) == 2
|
||||||
|
assert b[2][0]['tx_hash'] == txs[0]
|
||||||
|
assert b[2][0]['tx_type'] == 'unknown'
|
||||||
|
assert b[2][1]['tx_type'] == 'test.taag'
|
||||||
|
|
||||||
|
|
||||||
|
def test_cache_ranges(
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
list_tokens,
|
||||||
|
more_txs,
|
||||||
|
):
|
||||||
|
|
||||||
|
session = init_database
|
||||||
|
|
||||||
|
oldest = list_defaults['block'] - 1
|
||||||
|
mid = list_defaults['block']
|
||||||
|
newest = list_defaults['block'] + 2
|
||||||
|
|
||||||
|
c = BloomCache(session)
|
||||||
|
b = c.load_transactions(0, 100)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == newest
|
||||||
|
|
||||||
|
b = c.load_transactions(1, 2)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == mid
|
||||||
|
|
||||||
|
b = c.load_transactions(0, 2)
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == newest
|
||||||
|
|
||||||
|
b = c.load_transactions(0, 1)
|
||||||
|
assert b[0] == newest
|
||||||
|
assert b[1] == newest
|
||||||
|
|
||||||
|
b = c.load_transactions(0, 100, oldest=True)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == newest
|
||||||
|
|
||||||
|
b = c.load_transactions(0, 100, block_offset=list_defaults['block'])
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == newest
|
||||||
|
|
||||||
|
b = c.load_transactions(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == mid
|
||||||
|
|
||||||
|
b = c.load_transactions(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'], oldest=True)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == mid
|
||||||
|
|
||||||
|
# now check when supplying account
|
||||||
|
b = c.load_transactions_account(list_actors['alice'], 0, 100)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == newest
|
||||||
|
|
||||||
|
b = c.load_transactions_account(list_actors['bob'], 0, 100)
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == mid
|
||||||
|
|
||||||
|
b = c.load_transactions_account(list_actors['diane'], 0, 100)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == newest
|
||||||
|
|
||||||
|
# add block filter to the mix
|
||||||
|
b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == newest
|
||||||
|
|
||||||
|
b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == newest
|
||||||
|
|
||||||
|
b = c.load_transactions_account(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == mid
|
||||||
|
|
||||||
|
b = c.load_transactions_account(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == oldest
|
||||||
|
|
||||||
|
|
||||||
|
def test_cache_ranges_data(
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
list_tokens,
|
||||||
|
more_txs,
|
||||||
|
):
|
||||||
|
|
||||||
|
session = init_database
|
||||||
|
|
||||||
|
oldest = list_defaults['block'] - 1
|
||||||
|
mid = list_defaults['block']
|
||||||
|
newest = list_defaults['block'] + 2
|
||||||
|
|
||||||
|
c = DataCache(session)
|
||||||
|
|
||||||
|
b = c.load_transactions_with_data(0, 100)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == newest
|
||||||
|
assert len(b[2]) == 3
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||||
|
assert b[2][2]['tx_hash'] == more_txs[2]
|
||||||
|
|
||||||
|
b = c.load_transactions_with_data(1, 2)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == mid
|
||||||
|
assert len(b[2]) == 2
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[1]
|
||||||
|
assert b[2][1]['tx_hash'] == more_txs[2]
|
||||||
|
|
||||||
|
b = c.load_transactions_with_data(0, 2)
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == newest
|
||||||
|
assert len(b[2]) == 2
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||||
|
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||||
|
|
||||||
|
b = c.load_transactions_with_data(0, 1)
|
||||||
|
assert b[0] == newest
|
||||||
|
assert b[1] == newest
|
||||||
|
assert len(b[2]) == 1
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||||
|
|
||||||
|
b = c.load_transactions_with_data(0, 100, oldest=True)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == newest
|
||||||
|
assert len(b[2]) == 3
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[2]
|
||||||
|
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||||
|
assert b[2][2]['tx_hash'] == more_txs[0]
|
||||||
|
|
||||||
|
b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'])
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == newest
|
||||||
|
assert len(b[2]) == 2
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||||
|
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||||
|
|
||||||
|
b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == mid
|
||||||
|
assert len(b[2]) == 2
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[1]
|
||||||
|
assert b[2][1]['tx_hash'] == more_txs[2]
|
||||||
|
|
||||||
|
b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'], oldest=True)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == mid
|
||||||
|
assert len(b[2]) == 2
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[2]
|
||||||
|
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||||
|
|
||||||
|
# now check when supplying account
|
||||||
|
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == newest
|
||||||
|
assert len(b[2]) == 3
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||||
|
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||||
|
assert b[2][2]['tx_hash'] == more_txs[2]
|
||||||
|
|
||||||
|
b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100)
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == mid
|
||||||
|
assert len(b[2]) == 1
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[1]
|
||||||
|
|
||||||
|
b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100)
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == newest
|
||||||
|
assert len(b[2]) == 2
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||||
|
assert b[2][1]['tx_hash'] == more_txs[2]
|
||||||
|
|
||||||
|
# add block filter to the mix
|
||||||
|
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == newest
|
||||||
|
assert len(b[2]) == 2
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||||
|
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||||
|
|
||||||
|
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == newest
|
||||||
|
assert len(b[2]) == 2
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[0]
|
||||||
|
assert b[2][1]['tx_hash'] == more_txs[1]
|
||||||
|
|
||||||
|
b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||||
|
assert b[0] == mid
|
||||||
|
assert b[1] == mid
|
||||||
|
assert len(b[2]) == 1
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[1]
|
||||||
|
|
||||||
|
b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
|
||||||
|
assert b[0] == oldest
|
||||||
|
assert b[1] == oldest
|
||||||
|
assert len(b[2]) == 1
|
||||||
|
assert b[2][0]['tx_hash'] == more_txs[2]
|
||||||
|
|||||||
230
apps/cic-cache/tests/test_query.py
Normal file
230
apps/cic-cache/tests/test_query.py
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
import base64
|
||||||
|
import copy
|
||||||
|
import re
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import pytest
|
||||||
|
from hexathon import strip_0x
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_cache.runnable.daemons.query import *
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'query_path_prefix, query_role, query_address_index, query_offset, query_offset_index, query_limit, query_limit_index, match_re',
|
||||||
|
[
|
||||||
|
('/tx/user/', 'alice', 0, None, 3, None, 5, re_transactions_account_bloom),
|
||||||
|
('/tx/user/', 'alice', 0, 42, 3, None, 5, re_transactions_account_bloom),
|
||||||
|
('/tx/user/', 'alice', 0, 42, 3, 13, 5, re_transactions_account_bloom),
|
||||||
|
('/tx/', None, 0, None, 3, None, 5, re_transactions_all_bloom),
|
||||||
|
('/tx/', None, 0, 42, 3, None, 5, re_transactions_all_bloom),
|
||||||
|
('/tx/', None, 0, 42, 3, 13, 5, re_transactions_all_bloom),
|
||||||
|
('/txa/', None, 0, None, 3, None, 5, re_transactions_all_data),
|
||||||
|
('/txa/', None, 0, 42, 3, None, 5, re_transactions_all_data),
|
||||||
|
('/txa/', None, 0, 42, 3, 13, 5, re_transactions_all_data),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_query_regex(
|
||||||
|
list_actors,
|
||||||
|
query_path_prefix,
|
||||||
|
query_role,
|
||||||
|
query_address_index,
|
||||||
|
query_offset,
|
||||||
|
query_offset_index,
|
||||||
|
query_limit,
|
||||||
|
query_limit_index,
|
||||||
|
match_re,
|
||||||
|
):
|
||||||
|
|
||||||
|
paths = []
|
||||||
|
path = query_path_prefix
|
||||||
|
query_address = None
|
||||||
|
if query_role != None:
|
||||||
|
query_address = strip_0x(list_actors[query_role])
|
||||||
|
paths.append(path + '0x' + query_address)
|
||||||
|
paths.append(path + query_address)
|
||||||
|
if query_offset != None:
|
||||||
|
if query_limit != None:
|
||||||
|
for i in range(len(paths)-1):
|
||||||
|
paths[i] += '/{}/{}'.format(query_offset, query_limit)
|
||||||
|
else:
|
||||||
|
for i in range(len(paths)-1):
|
||||||
|
paths[i] += '/' + str(query_offset)
|
||||||
|
|
||||||
|
for i in range(len(paths)):
|
||||||
|
paths.append(paths[i] + '/')
|
||||||
|
|
||||||
|
for p in paths:
|
||||||
|
logg.debug('testing path {} against {}'.format(p, match_re))
|
||||||
|
m = re.match(match_re, p)
|
||||||
|
l = len(m.groups())
|
||||||
|
logg.debug('laast index match {} groups {}'.format(m.lastindex, l))
|
||||||
|
for i in range(l+1):
|
||||||
|
logg.debug('group {} {}'.format(i, m[i]))
|
||||||
|
if m.lastindex >= query_offset_index:
|
||||||
|
assert query_offset == int(m[query_offset_index + 1])
|
||||||
|
if m.lastindex >= query_limit_index:
|
||||||
|
assert query_limit == int(m[query_limit_index + 1])
|
||||||
|
if query_address_index != None:
|
||||||
|
match_address = strip_0x(m[query_address_index + 1])
|
||||||
|
assert query_address == match_address
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'role_name, query_offset, query_limit, query_match',
|
||||||
|
[
|
||||||
|
('alice', None, None, [(420000, 13), (419999, 42)]),
|
||||||
|
('alice', None, 1, [(420000, 13)]),
|
||||||
|
('alice', 1, None, [(419999, 42)]), # 420000 == list_defaults['block']
|
||||||
|
('alice', 2, None, []), # 420000 == list_defaults['block']
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_query_process_txs_account(
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
list_tokens,
|
||||||
|
txs,
|
||||||
|
zero_filter,
|
||||||
|
role_name,
|
||||||
|
query_offset,
|
||||||
|
query_limit,
|
||||||
|
query_match,
|
||||||
|
):
|
||||||
|
|
||||||
|
actor = None
|
||||||
|
try:
|
||||||
|
actor = list_actors[role_name]
|
||||||
|
except KeyError:
|
||||||
|
actor = os.urandom(20).hex()
|
||||||
|
path_info = '/tx/user/0x' + strip_0x(actor)
|
||||||
|
if query_offset != None:
|
||||||
|
path_info += '/' + str(query_offset)
|
||||||
|
if query_limit != None:
|
||||||
|
if query_offset == None:
|
||||||
|
path_info += '/0'
|
||||||
|
path_info += '/' + str(query_limit)
|
||||||
|
env = {
|
||||||
|
'PATH_INFO': path_info,
|
||||||
|
}
|
||||||
|
logg.debug('using path {}'.format(path_info))
|
||||||
|
r = process_transactions_account_bloom(init_database, env)
|
||||||
|
assert r != None
|
||||||
|
|
||||||
|
o = json.loads(r[1])
|
||||||
|
block_filter_data = base64.b64decode(o['block_filter'].encode('utf-8'))
|
||||||
|
zero_filter_data = zero_filter.to_bytes()
|
||||||
|
if len(query_match) == 0:
|
||||||
|
assert block_filter_data == zero_filter_data
|
||||||
|
return
|
||||||
|
|
||||||
|
assert block_filter_data != zero_filter_data
|
||||||
|
block_filter = copy.copy(zero_filter)
|
||||||
|
block_filter.merge(block_filter_data)
|
||||||
|
block_filter_data = block_filter.to_bytes()
|
||||||
|
assert block_filter_data != zero_filter_data
|
||||||
|
|
||||||
|
for (block, tx) in query_match:
|
||||||
|
block = block.to_bytes(4, byteorder='big')
|
||||||
|
assert block_filter.check(block)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'query_offset, query_limit, query_match',
|
||||||
|
[
|
||||||
|
(None, 2, [(420000, 13), (419999, 42)]),
|
||||||
|
(0, 1, [(420000, 13)]),
|
||||||
|
(1, 1, [(419999, 42)]),
|
||||||
|
(2, 0, []),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_query_process_txs_bloom(
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
list_tokens,
|
||||||
|
txs,
|
||||||
|
zero_filter,
|
||||||
|
query_offset,
|
||||||
|
query_limit,
|
||||||
|
query_match,
|
||||||
|
):
|
||||||
|
|
||||||
|
path_info = '/tx'
|
||||||
|
if query_offset != None:
|
||||||
|
path_info += '/' + str(query_offset)
|
||||||
|
if query_limit != None:
|
||||||
|
if query_offset == None:
|
||||||
|
path_info += '/0'
|
||||||
|
path_info += '/' + str(query_limit)
|
||||||
|
env = {
|
||||||
|
'PATH_INFO': path_info,
|
||||||
|
}
|
||||||
|
logg.debug('using path {}'.format(path_info))
|
||||||
|
r = process_transactions_all_bloom(init_database, env)
|
||||||
|
assert r != None
|
||||||
|
|
||||||
|
o = json.loads(r[1])
|
||||||
|
block_filter_data = base64.b64decode(o['block_filter'].encode('utf-8'))
|
||||||
|
zero_filter_data = zero_filter.to_bytes()
|
||||||
|
if len(query_match) == 0:
|
||||||
|
assert block_filter_data == zero_filter_data
|
||||||
|
return
|
||||||
|
|
||||||
|
assert block_filter_data != zero_filter_data
|
||||||
|
block_filter = copy.copy(zero_filter)
|
||||||
|
block_filter.merge(block_filter_data)
|
||||||
|
block_filter_data = block_filter.to_bytes()
|
||||||
|
assert block_filter_data != zero_filter_data
|
||||||
|
|
||||||
|
for (block, tx) in query_match:
|
||||||
|
block = block.to_bytes(4, byteorder='big')
|
||||||
|
assert block_filter.check(block)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'query_block_start, query_block_end, query_match_count',
|
||||||
|
[
|
||||||
|
(None, 42, 0),
|
||||||
|
(420000, 420001, 1),
|
||||||
|
(419999, 419999, 1), # matches are inclusive
|
||||||
|
(419999, 420000, 2),
|
||||||
|
(419999, 420001, 2),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_query_process_txs_data(
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
list_tokens,
|
||||||
|
txs,
|
||||||
|
zero_filter,
|
||||||
|
query_block_start,
|
||||||
|
query_block_end,
|
||||||
|
query_match_count,
|
||||||
|
):
|
||||||
|
|
||||||
|
path_info = '/txa'
|
||||||
|
if query_block_start != None:
|
||||||
|
path_info += '/' + str(query_block_start)
|
||||||
|
if query_block_end != None:
|
||||||
|
if query_block_start == None:
|
||||||
|
path_info += '/0'
|
||||||
|
path_info += '/' + str(query_block_end)
|
||||||
|
env = {
|
||||||
|
'PATH_INFO': path_info,
|
||||||
|
'HTTP_X_CIC_CACHE_MODE': 'all',
|
||||||
|
}
|
||||||
|
logg.debug('using path {}'.format(path_info))
|
||||||
|
r = process_transactions_all_data(init_database, env)
|
||||||
|
assert r != None
|
||||||
|
|
||||||
|
o = json.loads(r[1])
|
||||||
|
assert len(o['data']) == query_match_count
|
||||||
37
apps/cic-cache/tests/test_tag.py
Normal file
37
apps/cic-cache/tests/test_tag.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
import os
|
||||||
|
import datetime
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_cache.db import tag_transaction
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def test_cache(
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
list_tokens,
|
||||||
|
txs,
|
||||||
|
tags,
|
||||||
|
):
|
||||||
|
|
||||||
|
tag_transaction(init_database, txs[0], 'foo')
|
||||||
|
tag_transaction(init_database, txs[0], 'baz', domain='bar')
|
||||||
|
tag_transaction(init_database, txs[1], 'xyzzy', domain='bar')
|
||||||
|
|
||||||
|
r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.value = 'foo'").fetchall()
|
||||||
|
assert r[0][0] == txs[0]
|
||||||
|
|
||||||
|
|
||||||
|
r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'baz'").fetchall()
|
||||||
|
assert r[0][0] == txs[0]
|
||||||
|
|
||||||
|
|
||||||
|
r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'xyzzy'").fetchall()
|
||||||
|
assert r[0][0] == txs[1]
|
||||||
2
apps/cic-cache/tests/testdata/config/test.ini
vendored
Normal file
2
apps/cic-cache/tests/testdata/config/test.ini
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
[foo]
|
||||||
|
bar_baz = xyzzy
|
||||||
1
apps/cic-eth-aux/erc20-demurrage-token/MANIFEST.in
Normal file
1
apps/cic-eth-aux/erc20-demurrage-token/MANIFEST.in
Normal file
@@ -0,0 +1 @@
|
|||||||
|
include *requirements.txt
|
||||||
@@ -0,0 +1,53 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
from erc20_demurrage_token.demurrage import DemurrageCalculator
|
||||||
|
from chainlib.connection import RPCConnection
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
|
from cic_eth_registry import CICRegistry
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
|
||||||
|
|
||||||
|
class NoopCalculator:
|
||||||
|
|
||||||
|
def amount_since(self, amount, timestamp):
|
||||||
|
logg.debug('noopcalculator amount {} timestamp {}'.format(amount, timestamp))
|
||||||
|
return amount
|
||||||
|
|
||||||
|
|
||||||
|
class DemurrageCalculationTask(celery.Task):
|
||||||
|
|
||||||
|
demurrage_token_calcs = {}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def register_token(cls, rpc, chain_spec, token_symbol, sender_address=ZERO_ADDRESS):
|
||||||
|
registry = CICRegistry(chain_spec, rpc)
|
||||||
|
token_address = registry.by_name(token_symbol, sender_address=sender_address)
|
||||||
|
try:
|
||||||
|
c = DemurrageCalculator.from_contract(rpc, chain_spec, token_address, sender_address=sender_address)
|
||||||
|
logg.info('found demurrage calculator for ERC20 {} @ {}'.format(token_symbol, token_address))
|
||||||
|
except:
|
||||||
|
logg.warning('Token {} at address {} does not appear to be a demurrage contract. Calls to balance adjust for this token will always return the same amount'.format(token_symbol, token_address))
|
||||||
|
c = NoopCalculator()
|
||||||
|
|
||||||
|
cls.demurrage_token_calcs[token_symbol] = c
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, base=DemurrageCalculationTask)
|
||||||
|
def get_adjusted_balance(self, token_symbol, amount, timestamp):
|
||||||
|
c = self.demurrage_token_calcs[token_symbol]
|
||||||
|
return c.amount_since(amount, timestamp)
|
||||||
|
|
||||||
|
|
||||||
|
def aux_setup(rpc, config, sender_address=ZERO_ADDRESS):
|
||||||
|
chain_spec_str = config.get('CHAIN_SPEC')
|
||||||
|
chain_spec = ChainSpec.from_chain_str(chain_spec_str)
|
||||||
|
token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
|
||||||
|
|
||||||
|
DemurrageCalculationTask.register_token(rpc, chain_spec, token_symbol, sender_address=sender_address)
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
from cic_eth.api.base import ApiBase
|
||||||
|
|
||||||
|
app = celery.current_app
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Api(ApiBase):
|
||||||
|
|
||||||
|
def get_adjusted_balance(self, token_symbol, balance, timestamp):
|
||||||
|
s = celery.signature(
|
||||||
|
'cic_eth_aux.erc20_demurrage_token.get_adjusted_balance',
|
||||||
|
[
|
||||||
|
token_symbol,
|
||||||
|
balance,
|
||||||
|
timestamp,
|
||||||
|
],
|
||||||
|
queue=None,
|
||||||
|
)
|
||||||
|
if self.callback_param != None:
|
||||||
|
s.link(self.callback_success)
|
||||||
|
s.link.on_error(self.callback_error)
|
||||||
|
|
||||||
|
t = s.apply_async(queue=self.queue)
|
||||||
|
return t
|
||||||
4
apps/cic-eth-aux/erc20-demurrage-token/requirements.txt
Normal file
4
apps/cic-eth-aux/erc20-demurrage-token/requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
celery==4.4.7
|
||||||
|
erc20-demurrage-token~=0.0.3a1
|
||||||
|
cic-eth-registry>=0.6.1a2,<0.7.0
|
||||||
|
cic-eth[services]~=0.12.4a8
|
||||||
30
apps/cic-eth-aux/erc20-demurrage-token/setup.cfg
Normal file
30
apps/cic-eth-aux/erc20-demurrage-token/setup.cfg
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
[metadata]
|
||||||
|
name = cic-eth-aux-erc20-demurrage-token
|
||||||
|
version = 0.0.2a6
|
||||||
|
description = cic-eth tasks supporting erc20 demurrage token
|
||||||
|
author = Louis Holbrook
|
||||||
|
author_email = dev@holbrook.no
|
||||||
|
url = https://gitlab.com/ccicnet/erc20-demurrage-token
|
||||||
|
keywords =
|
||||||
|
ethereum
|
||||||
|
blockchain
|
||||||
|
cryptocurrency
|
||||||
|
erc20
|
||||||
|
classifiers =
|
||||||
|
Programming Language :: Python :: 3
|
||||||
|
Operating System :: OS Independent
|
||||||
|
Development Status :: 3 - Alpha
|
||||||
|
Environment :: No Input/Output (Daemon)
|
||||||
|
Intended Audience :: Developers
|
||||||
|
License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
|
||||||
|
Topic :: Internet
|
||||||
|
#Topic :: Blockchain :: EVM
|
||||||
|
license = GPL3
|
||||||
|
licence_files =
|
||||||
|
LICENSE
|
||||||
|
|
||||||
|
[options]
|
||||||
|
include_package_data = True
|
||||||
|
python_requires = >= 3.6
|
||||||
|
packages =
|
||||||
|
cic_eth_aux.erc20_demurrage_token
|
||||||
25
apps/cic-eth-aux/erc20-demurrage-token/setup.py
Normal file
25
apps/cic-eth-aux/erc20-demurrage-token/setup.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
from setuptools import setup
|
||||||
|
|
||||||
|
requirements = []
|
||||||
|
f = open('requirements.txt', 'r')
|
||||||
|
while True:
|
||||||
|
l = f.readline()
|
||||||
|
if l == '':
|
||||||
|
break
|
||||||
|
requirements.append(l.rstrip())
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
test_requirements = []
|
||||||
|
f = open('test_requirements.txt', 'r')
|
||||||
|
while True:
|
||||||
|
l = f.readline()
|
||||||
|
if l == '':
|
||||||
|
break
|
||||||
|
test_requirements.append(l.rstrip())
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
|
||||||
|
setup(
|
||||||
|
install_requires=requirements,
|
||||||
|
tests_require=test_requirements,
|
||||||
|
)
|
||||||
11
apps/cic-eth-aux/erc20-demurrage-token/test_requirements.txt
Normal file
11
apps/cic-eth-aux/erc20-demurrage-token/test_requirements.txt
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
pytest==6.0.1
|
||||||
|
pytest-celery==0.0.0a1
|
||||||
|
pytest-mock==3.3.1
|
||||||
|
pytest-cov==2.10.1
|
||||||
|
eth-tester==0.5.0b3
|
||||||
|
py-evm==0.3.0a20
|
||||||
|
SQLAlchemy==1.3.20
|
||||||
|
liveness~=0.0.1a7
|
||||||
|
eth-accounts-index==0.1.1a1
|
||||||
|
eth-contract-registry==0.5.8a1
|
||||||
|
eth-address-index==0.2.1a1
|
||||||
88
apps/cic-eth-aux/erc20-demurrage-token/tests/conftest.py
Normal file
88
apps/cic-eth-aux/erc20-demurrage-token/tests/conftest.py
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
from chainlib.eth.pytest.fixtures_chain import *
|
||||||
|
from chainlib.eth.pytest.fixtures_ethtester import *
|
||||||
|
from cic_eth_registry.pytest.fixtures_contracts import *
|
||||||
|
from cic_eth_registry.pytest.fixtures_tokens import *
|
||||||
|
from erc20_demurrage_token.unittest.base import TestTokenDeploy
|
||||||
|
from erc20_demurrage_token.token import DemurrageToken
|
||||||
|
from eth_token_index.index import TokenUniqueSymbolIndex
|
||||||
|
from eth_address_declarator.declarator import AddressDeclarator
|
||||||
|
|
||||||
|
# cic-eth imports
|
||||||
|
from cic_eth.pytest.fixtures_celery import *
|
||||||
|
from cic_eth.pytest.fixtures_token import *
|
||||||
|
from cic_eth.pytest.fixtures_config import *
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def demurrage_token(
|
||||||
|
default_chain_spec,
|
||||||
|
eth_rpc,
|
||||||
|
token_registry,
|
||||||
|
contract_roles,
|
||||||
|
eth_signer,
|
||||||
|
):
|
||||||
|
d = TestTokenDeploy(eth_rpc, token_symbol='BAR', token_name='Bar Token')
|
||||||
|
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
|
||||||
|
c = DemurrageToken(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
|
||||||
|
token_address = d.deploy(eth_rpc, contract_roles['CONTRACT_DEPLOYER'], c, 'SingleNocap')
|
||||||
|
logg.debug('demurrage token contract "BAR" deployed to {}'.format(token_address))
|
||||||
|
|
||||||
|
return token_address
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def demurrage_token_symbol(
|
||||||
|
default_chain_spec,
|
||||||
|
eth_rpc,
|
||||||
|
demurrage_token,
|
||||||
|
contract_roles,
|
||||||
|
):
|
||||||
|
|
||||||
|
c = DemurrageToken(default_chain_spec)
|
||||||
|
o = c.symbol(demurrage_token, sender_address=contract_roles['CONTRACT_DEPLOYER'])
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
return c.parse_symbol(r)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def demurrage_token_declaration(
|
||||||
|
foo_token_declaration,
|
||||||
|
):
|
||||||
|
return foo_token_declaration
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def register_demurrage_token(
|
||||||
|
default_chain_spec,
|
||||||
|
token_registry,
|
||||||
|
eth_rpc,
|
||||||
|
eth_signer,
|
||||||
|
register_lookups,
|
||||||
|
contract_roles,
|
||||||
|
demurrage_token_declaration,
|
||||||
|
demurrage_token,
|
||||||
|
address_declarator,
|
||||||
|
):
|
||||||
|
|
||||||
|
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc)
|
||||||
|
|
||||||
|
c = TokenUniqueSymbolIndex(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
|
||||||
|
(tx_hash_hex, o) = c.register(token_registry, contract_roles['CONTRACT_DEPLOYER'], demurrage_token)
|
||||||
|
eth_rpc.do(o)
|
||||||
|
o = receipt(tx_hash_hex)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
assert r['status'] == 1
|
||||||
|
|
||||||
|
nonce_oracle = RPCNonceOracle(contract_roles['TRUSTED_DECLARATOR'], eth_rpc)
|
||||||
|
c = AddressDeclarator(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
|
||||||
|
(tx_hash_hex, o) = c.add_declaration(address_declarator, contract_roles['TRUSTED_DECLARATOR'], demurrage_token, demurrage_token_declaration)
|
||||||
|
|
||||||
|
eth_rpc.do(o)
|
||||||
|
o = receipt(tx_hash_hex)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
assert r['status'] == 1
|
||||||
|
|
||||||
|
return token_registry
|
||||||
|
|
||||||
@@ -0,0 +1,69 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
import copy
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
|
||||||
|
# cic-eth imports
|
||||||
|
from cic_eth_aux.erc20_demurrage_token import (
|
||||||
|
DemurrageCalculationTask,
|
||||||
|
aux_setup,
|
||||||
|
)
|
||||||
|
from cic_eth_aux.erc20_demurrage_token.api import Api as AuxApi
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def test_demurrage_calulate_task(
|
||||||
|
default_chain_spec,
|
||||||
|
eth_rpc,
|
||||||
|
cic_registry,
|
||||||
|
celery_session_worker,
|
||||||
|
register_demurrage_token,
|
||||||
|
demurrage_token_symbol,
|
||||||
|
contract_roles,
|
||||||
|
load_config,
|
||||||
|
):
|
||||||
|
|
||||||
|
config = copy.copy(load_config)
|
||||||
|
config.add(str(default_chain_spec), 'CIC_CHAIN_SPEC', exists_ok=True)
|
||||||
|
config.add(demurrage_token_symbol, 'CIC_DEFAULT_TOKEN_SYMBOL', exists_ok=True)
|
||||||
|
aux_setup(eth_rpc, load_config, sender_address=contract_roles['CONTRACT_DEPLOYER'])
|
||||||
|
|
||||||
|
since = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
|
||||||
|
s = celery.signature(
|
||||||
|
'cic_eth_aux.erc20_demurrage_token.get_adjusted_balance',
|
||||||
|
[
|
||||||
|
demurrage_token_symbol,
|
||||||
|
1000,
|
||||||
|
since.timestamp(),
|
||||||
|
],
|
||||||
|
queue=None,
|
||||||
|
)
|
||||||
|
t = s.apply_async()
|
||||||
|
r = t.get_leaf()
|
||||||
|
assert t.successful()
|
||||||
|
assert r == 980
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def test_demurrage_calculate_api(
|
||||||
|
default_chain_spec,
|
||||||
|
eth_rpc,
|
||||||
|
cic_registry,
|
||||||
|
celery_session_worker,
|
||||||
|
register_demurrage_token,
|
||||||
|
demurrage_token_symbol,
|
||||||
|
contract_roles,
|
||||||
|
load_config,
|
||||||
|
):
|
||||||
|
|
||||||
|
api = AuxApi(str(default_chain_spec), queue=None)
|
||||||
|
since = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
|
||||||
|
t = api.get_adjusted_balance(demurrage_token_symbol, 1000, since.timestamp())
|
||||||
|
r = t.get_leaf()
|
||||||
|
assert t.successful()
|
||||||
|
assert r == 980
|
||||||
|
|
||||||
@@ -5,3 +5,6 @@ omit =
|
|||||||
cic_eth/db/migrations/*
|
cic_eth/db/migrations/*
|
||||||
cic_eth/sync/head.py
|
cic_eth/sync/head.py
|
||||||
cic_eth/sync/mempool.py
|
cic_eth/sync/mempool.py
|
||||||
|
cic_eth/queue/state.py
|
||||||
|
cic_eth/cli
|
||||||
|
*redis*.py
|
||||||
|
|||||||
6
apps/cic-eth/.dockerignore
Normal file
6
apps/cic-eth/.dockerignore
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
.git
|
||||||
|
.cache
|
||||||
|
.dot
|
||||||
|
**/doc
|
||||||
|
**/.venv
|
||||||
|
**/venv
|
||||||
@@ -1,22 +1,16 @@
|
|||||||
.cic_eth_variables:
|
build-test-cic-eth:
|
||||||
variables:
|
stage: test
|
||||||
APP_NAME: cic-eth
|
tags:
|
||||||
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
|
- integration
|
||||||
|
variables:
|
||||||
.cic_eth_changes_target:
|
APP_NAME: cic-eth
|
||||||
rules:
|
MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
|
||||||
- changes:
|
script:
|
||||||
- $CONTEXT/$APP_NAME/*
|
- cd apps/cic-eth
|
||||||
|
- docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
|
||||||
build-mr-cic-eth:
|
- docker run $MR_IMAGE_TAG sh docker/run_tests.sh
|
||||||
extends:
|
rules:
|
||||||
- .cic_eth_changes_target
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
- .py_build_merge_request
|
changes:
|
||||||
- .cic_eth_variables
|
- apps/$APP_NAME/**/*
|
||||||
|
when: always
|
||||||
build-push-cic-eth:
|
|
||||||
extends:
|
|
||||||
- .py_build_push
|
|
||||||
- .cic_eth_variables
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
2
apps/cic-eth/MANIFEST.in
Normal file
2
apps/cic-eth/MANIFEST.in
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
include *requirements.txt config/test/* cic_eth/data/config/*
|
||||||
|
|
||||||
5
apps/cic-eth/admin_requirements.txt
Normal file
5
apps/cic-eth/admin_requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
SQLAlchemy==1.3.20
|
||||||
|
cic-eth-registry>=0.6.1a2,<0.7.0
|
||||||
|
hexathon~=0.0.1a8
|
||||||
|
chainqueue>=0.0.4a6,<0.1.0
|
||||||
|
eth-erc20>=0.1.2a2,<0.2.0
|
||||||
@@ -2,10 +2,14 @@
|
|||||||
import datetime
|
import datetime
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
from chainlib.eth.constant import ZERO_ADDRESS
|
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
|
from hexathon import (
|
||||||
|
add_0x,
|
||||||
|
strip_0x,
|
||||||
|
uniform as hex_uniform,
|
||||||
|
)
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.db.enum import LockEnum
|
from cic_eth.db.enum import LockEnum
|
||||||
@@ -15,12 +19,17 @@ from cic_eth.task import (
|
|||||||
CriticalSQLAlchemyTask,
|
CriticalSQLAlchemyTask,
|
||||||
)
|
)
|
||||||
from cic_eth.error import LockedError
|
from cic_eth.error import LockedError
|
||||||
|
from cic_eth.encode import (
|
||||||
|
tx_normalize,
|
||||||
|
ZERO_ADDRESS_NORMAL,
|
||||||
|
)
|
||||||
|
|
||||||
celery_app = celery.current_app
|
celery_app = celery.current_app
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL, tx_hash=None):
|
def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, flags=LockEnum.ALL, tx_hash=None):
|
||||||
"""Task wrapper to set arbitrary locks
|
"""Task wrapper to set arbitrary locks
|
||||||
|
|
||||||
:param chain_str: Chain spec string representation
|
:param chain_str: Chain spec string representation
|
||||||
@@ -32,14 +41,17 @@ def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.AL
|
|||||||
:returns: New lock state for address
|
:returns: New lock state for address
|
||||||
:rtype: number
|
:rtype: number
|
||||||
"""
|
"""
|
||||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
address = tx_normalize.wallet_address(address)
|
||||||
|
chain_str = '::'
|
||||||
|
if chain_spec_dict != None:
|
||||||
|
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||||
r = Lock.set(chain_str, flags, address=address, tx_hash=tx_hash)
|
r = Lock.set(chain_str, flags, address=address, tx_hash=tx_hash)
|
||||||
logg.debug('Locked {} for {}, flag now {}'.format(flags, address, r))
|
logg.debug('Locked {} for {}, flag now {}'.format(flags, address, r))
|
||||||
return chained_input
|
return chained_input
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL):
|
def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, flags=LockEnum.ALL):
|
||||||
"""Task wrapper to reset arbitrary locks
|
"""Task wrapper to reset arbitrary locks
|
||||||
|
|
||||||
:param chain_str: Chain spec string representation
|
:param chain_str: Chain spec string representation
|
||||||
@@ -51,14 +63,17 @@ def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.
|
|||||||
:returns: New lock state for address
|
:returns: New lock state for address
|
||||||
:rtype: number
|
:rtype: number
|
||||||
"""
|
"""
|
||||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
address = tx_normalize.wallet_address(address)
|
||||||
|
chain_str = '::'
|
||||||
|
if chain_spec_dict != None:
|
||||||
|
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||||
r = Lock.reset(chain_str, flags, address=address)
|
r = Lock.reset(chain_str, flags, address=address)
|
||||||
logg.debug('Unlocked {} for {}, flag now {}'.format(flags, address, r))
|
logg.debug('Unlocked {} for {}, flag now {}'.format(flags, address, r))
|
||||||
return chained_input
|
return chained_input
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None):
|
def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, tx_hash=None):
|
||||||
"""Task wrapper to set send lock
|
"""Task wrapper to set send lock
|
||||||
|
|
||||||
:param chain_str: Chain spec string representation
|
:param chain_str: Chain spec string representation
|
||||||
@@ -68,6 +83,7 @@ def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None
|
|||||||
:returns: New lock state for address
|
:returns: New lock state for address
|
||||||
:rtype: number
|
:rtype: number
|
||||||
"""
|
"""
|
||||||
|
address = tx_normalize.wallet_address(address)
|
||||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||||
r = Lock.set(chain_str, LockEnum.SEND, address=address, tx_hash=tx_hash)
|
r = Lock.set(chain_str, LockEnum.SEND, address=address, tx_hash=tx_hash)
|
||||||
logg.debug('Send locked for {}, flag now {}'.format(address, r))
|
logg.debug('Send locked for {}, flag now {}'.format(address, r))
|
||||||
@@ -75,7 +91,7 @@ def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None
|
|||||||
|
|
||||||
|
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
|
def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL):
|
||||||
"""Task wrapper to reset send lock
|
"""Task wrapper to reset send lock
|
||||||
|
|
||||||
:param chain_str: Chain spec string representation
|
:param chain_str: Chain spec string representation
|
||||||
@@ -85,6 +101,7 @@ def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
|
|||||||
:returns: New lock state for address
|
:returns: New lock state for address
|
||||||
:rtype: number
|
:rtype: number
|
||||||
"""
|
"""
|
||||||
|
address = tx_normalize.wallet_address(address)
|
||||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||||
r = Lock.reset(chain_str, LockEnum.SEND, address=address)
|
r = Lock.reset(chain_str, LockEnum.SEND, address=address)
|
||||||
logg.debug('Send unlocked for {}, flag now {}'.format(address, r))
|
logg.debug('Send unlocked for {}, flag now {}'.format(address, r))
|
||||||
@@ -92,7 +109,7 @@ def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
|
|||||||
|
|
||||||
|
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None):
|
def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, tx_hash=None):
|
||||||
"""Task wrapper to set queue direct lock
|
"""Task wrapper to set queue direct lock
|
||||||
|
|
||||||
:param chain_str: Chain spec string representation
|
:param chain_str: Chain spec string representation
|
||||||
@@ -102,6 +119,7 @@ def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=Non
|
|||||||
:returns: New lock state for address
|
:returns: New lock state for address
|
||||||
:rtype: number
|
:rtype: number
|
||||||
"""
|
"""
|
||||||
|
address = tx_normalize.wallet_address(address)
|
||||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||||
r = Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
|
r = Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
|
||||||
logg.debug('Queue direct locked for {}, flag now {}'.format(address, r))
|
logg.debug('Queue direct locked for {}, flag now {}'.format(address, r))
|
||||||
@@ -109,7 +127,7 @@ def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=Non
|
|||||||
|
|
||||||
|
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
|
def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL):
|
||||||
"""Task wrapper to reset queue direct lock
|
"""Task wrapper to reset queue direct lock
|
||||||
|
|
||||||
:param chain_str: Chain spec string representation
|
:param chain_str: Chain spec string representation
|
||||||
@@ -119,6 +137,7 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
|
|||||||
:returns: New lock state for address
|
:returns: New lock state for address
|
||||||
:rtype: number
|
:rtype: number
|
||||||
"""
|
"""
|
||||||
|
address = tx_normalize.wallet_address(address)
|
||||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||||
r = Lock.reset(chain_str, LockEnum.QUEUE, address=address)
|
r = Lock.reset(chain_str, LockEnum.QUEUE, address=address)
|
||||||
logg.debug('Queue direct unlocked for {}, flag now {}'.format(address, r))
|
logg.debug('Queue direct unlocked for {}, flag now {}'.format(address, r))
|
||||||
@@ -127,9 +146,13 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
|
|||||||
|
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
|
def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
|
||||||
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
if address != None:
|
||||||
|
address = tx_normalize.wallet_address(address)
|
||||||
|
chain_str = '::'
|
||||||
|
if chain_spec_dict != None:
|
||||||
|
chain_str = str(ChainSpec.from_dict(chain_spec_dict))
|
||||||
session = SessionBase.create_session()
|
session = SessionBase.create_session()
|
||||||
r = Lock.check(chain_str, lock_flags, address=ZERO_ADDRESS, session=session)
|
r = Lock.check(chain_str, lock_flags, address=ZERO_ADDRESS_NORMAL, session=session)
|
||||||
if address != None:
|
if address != None:
|
||||||
r |= Lock.check(chain_str, lock_flags, address=address, session=session)
|
r |= Lock.check(chain_str, lock_flags, address=address, session=session)
|
||||||
if r > 0:
|
if r > 0:
|
||||||
@@ -139,3 +162,9 @@ def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
|
|||||||
session.flush()
|
session.flush()
|
||||||
session.close()
|
session.close()
|
||||||
return chained_input
|
return chained_input
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
|
||||||
|
def shutdown(message):
|
||||||
|
logg.critical('shutdown called: {}'.format(message))
|
||||||
|
celery_app.control.shutdown() #broadcast('shutdown')
|
||||||
|
|||||||
@@ -4,12 +4,25 @@ import logging
|
|||||||
# external imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
from chainlib.eth.tx import unpack
|
from chainlib.connection import RPCConnection
|
||||||
|
from chainlib.eth.tx import (
|
||||||
|
unpack,
|
||||||
|
TxFactory,
|
||||||
|
)
|
||||||
|
from chainlib.eth.gas import OverrideGasOracle
|
||||||
|
from chainqueue.sql.query import get_tx
|
||||||
|
from chainqueue.sql.state import set_cancel
|
||||||
|
from chainqueue.db.models.otx import Otx
|
||||||
|
from chainqueue.db.models.tx import TxCache
|
||||||
|
from hexathon import (
|
||||||
|
strip_0x,
|
||||||
|
add_0x,
|
||||||
|
uniform as hex_uniform,
|
||||||
|
)
|
||||||
|
from potaahto.symbols import snake_and_camel
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.db.models.base import SessionBase
|
from cic_eth.db.models.base import SessionBase
|
||||||
from cic_eth.db.models.otx import Otx
|
|
||||||
from cic_eth.db.models.tx import TxCache
|
|
||||||
from cic_eth.db.models.nonce import Nonce
|
from cic_eth.db.models.nonce import Nonce
|
||||||
from cic_eth.admin.ctrl import (
|
from cic_eth.admin.ctrl import (
|
||||||
lock_send,
|
lock_send,
|
||||||
@@ -17,21 +30,17 @@ from cic_eth.admin.ctrl import (
|
|||||||
lock_queue,
|
lock_queue,
|
||||||
unlock_queue,
|
unlock_queue,
|
||||||
)
|
)
|
||||||
from cic_eth.queue.tx import (
|
from cic_eth.queue.tx import queue_create
|
||||||
get_tx,
|
from cic_eth.eth.gas import create_check_gas_task
|
||||||
set_cancel,
|
from cic_eth.task import BaseTask
|
||||||
)
|
from cic_eth.encode import tx_normalize
|
||||||
from cic_eth.queue.tx import create as queue_create
|
|
||||||
from cic_eth.eth.gas import (
|
|
||||||
create_check_gas_task,
|
|
||||||
)
|
|
||||||
|
|
||||||
celery_app = celery.current_app
|
celery_app = celery.current_app
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task(bind=True)
|
@celery_app.task(bind=True, base=BaseTask)
|
||||||
def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1):
|
def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
|
||||||
"""Shift all transactions with nonces higher than the offset by the provided position delta.
|
"""Shift all transactions with nonces higher than the offset by the provided position delta.
|
||||||
|
|
||||||
Transactions who are replaced by transactions that move nonces will be marked as OVERRIDDEN.
|
Transactions who are replaced by transactions that move nonces will be marked as OVERRIDDEN.
|
||||||
@@ -42,89 +51,109 @@ def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1):
|
|||||||
:type tx_hash_orig_hex: str, 0x-hex
|
:type tx_hash_orig_hex: str, 0x-hex
|
||||||
:param delta: Amount
|
:param delta: Amount
|
||||||
"""
|
"""
|
||||||
|
chain_spec = ChainSpec.from_dict(chainspec_dict)
|
||||||
|
rpc = RPCConnection.connect(chain_spec, 'default')
|
||||||
|
rpc_signer = RPCConnection.connect(chain_spec, 'signer')
|
||||||
queue = None
|
queue = None
|
||||||
try:
|
try:
|
||||||
queue = self.request.delivery_info.get('routing_key')
|
queue = self.request.delivery_info.get('routing_key')
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
session = BaseTask.session_func()
|
||||||
tx_brief = get_tx(tx_hash_orig_hex)
|
tx_brief = get_tx(chain_spec, tx_hash_orig_hex, session=session)
|
||||||
tx_raw = bytes.fromhex(tx_brief['signed_tx'][2:])
|
tx_raw = bytes.fromhex(strip_0x(tx_brief['signed_tx']))
|
||||||
tx = unpack(tx_raw, chain_spec.chain_id())
|
tx = unpack(tx_raw, chain_spec)
|
||||||
nonce = tx_brief['nonce']
|
nonce = tx_brief['nonce']
|
||||||
address = tx['from']
|
address = tx['from']
|
||||||
|
|
||||||
logg.debug('shifting nonce {} position(s) for address {}, offset {}'.format(delta, address, nonce))
|
logg.debug('shifting nonce {} position(s) for address {}, offset {}, hash {}'.format(delta, address, nonce, tx['hash']))
|
||||||
|
|
||||||
lock_queue(None, chain_str, address)
|
lock_queue(None, chain_spec.asdict(), address=address)
|
||||||
lock_send(None, chain_str, address)
|
lock_send(None, chain_spec.asdict(), address=address)
|
||||||
|
|
||||||
session = SessionBase.create_session()
|
set_cancel(chain_spec, strip_0x(tx['hash']), manual=True, session=session)
|
||||||
|
|
||||||
|
query_address = tx_normalize.wallet_address(address)
|
||||||
q = session.query(Otx)
|
q = session.query(Otx)
|
||||||
q = q.join(TxCache)
|
q = q.join(TxCache)
|
||||||
q = q.filter(TxCache.sender==address)
|
q = q.filter(TxCache.sender==query_address)
|
||||||
q = q.filter(Otx.nonce>=nonce+delta)
|
q = q.filter(Otx.nonce>=nonce+delta)
|
||||||
q = q.order_by(Otx.nonce.asc())
|
q = q.order_by(Otx.nonce.asc())
|
||||||
otxs = q.all()
|
otxs = q.all()
|
||||||
|
|
||||||
tx_hashes = []
|
tx_hashes = []
|
||||||
txs = []
|
txs = []
|
||||||
|
gas_total = 0
|
||||||
for otx in otxs:
|
for otx in otxs:
|
||||||
tx_raw = bytes.fromhex(otx.signed_tx[2:])
|
tx_raw = bytes.fromhex(strip_0x(otx.signed_tx))
|
||||||
tx_new = unpack(tx_raw, chain_spec.chain_id())
|
tx_new = unpack(tx_raw, chain_spec)
|
||||||
|
tx_new = snake_and_camel(tx_new)
|
||||||
|
|
||||||
tx_previous_hash_hex = tx_new['hash']
|
tx_previous_hash_hex = tx_new['hash']
|
||||||
tx_previous_nonce = tx_new['nonce']
|
tx_previous_nonce = tx_new['nonce']
|
||||||
|
|
||||||
|
tx_new['gas_price'] += 1
|
||||||
|
tx_new['gasPrice'] = tx_new['gas_price']
|
||||||
|
tx_new['nonce'] -= delta
|
||||||
|
gas_total += tx_new['gas_price'] * tx_new['gas']
|
||||||
|
|
||||||
|
logg.debug('tx_new {}'.format(tx_new))
|
||||||
|
logg.debug('gas running total {}'.format(gas_total))
|
||||||
|
|
||||||
del(tx_new['hash'])
|
del(tx_new['hash'])
|
||||||
del(tx_new['hash_unsigned'])
|
del(tx_new['hash_unsigned'])
|
||||||
tx_new['nonce'] -= delta
|
del(tx_new['hashUnsigned'])
|
||||||
|
|
||||||
(tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx_new, chain_str)
|
gas_oracle = OverrideGasOracle(limit=tx_new['gas'], price=tx_new['gas_price'] + 1) # TODO: it should be possible to merely set this price here and if missing in the existing struct then fill it in (chainlib.eth.tx)
|
||||||
|
c = TxFactory(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle)
|
||||||
|
(tx_hash_hex, tx_signed_raw_hex) = c.build_raw(tx_new)
|
||||||
logg.debug('tx {} -> {} nonce {} -> {}'.format(tx_previous_hash_hex, tx_hash_hex, tx_previous_nonce, tx_new['nonce']))
|
logg.debug('tx {} -> {} nonce {} -> {}'.format(tx_previous_hash_hex, tx_hash_hex, tx_previous_nonce, tx_new['nonce']))
|
||||||
|
|
||||||
otx = Otx(
|
otx = Otx(
|
||||||
nonce=tx_new['nonce'],
|
tx_new['nonce'],
|
||||||
address=tx_new['from'],
|
tx_hash_hex,
|
||||||
tx_hash=tx_hash_hex,
|
tx_signed_raw_hex,
|
||||||
signed_tx=tx_signed_raw_hex,
|
)
|
||||||
)
|
|
||||||
session.add(otx)
|
session.add(otx)
|
||||||
session.commit()
|
|
||||||
|
|
||||||
# TODO: cancel all first, then replace. Otherwise we risk two non-locked states for two different nonces.
|
# TODO: cancel all first, then replace. Otherwise we risk two non-locked states for two different nonces.
|
||||||
set_cancel(tx_previous_hash_hex, True)
|
set_cancel(chain_spec, strip_0x(tx_previous_hash_hex), manual=True, session=session)
|
||||||
|
|
||||||
TxCache.clone(tx_previous_hash_hex, tx_hash_hex)
|
TxCache.clone(tx_previous_hash_hex, tx_hash_hex, session=session)
|
||||||
|
|
||||||
tx_hashes.append(tx_hash_hex)
|
tx_hashes.append(tx_hash_hex)
|
||||||
txs.append(tx_signed_raw_hex)
|
txs.append(tx_signed_raw_hex)
|
||||||
|
session.commit()
|
||||||
|
|
||||||
session.close()
|
session.close()
|
||||||
|
|
||||||
s = create_check_gas_and_send_task(
|
s = create_check_gas_task(
|
||||||
txs,
|
txs,
|
||||||
chain_str,
|
chain_spec,
|
||||||
tx_new['from'],
|
#tx_new['from'],
|
||||||
tx_new['gas'],
|
address,
|
||||||
tx_hashes,
|
#gas=tx_new['gas'],
|
||||||
queue,
|
gas=gas_total,
|
||||||
|
tx_hashes_hex=tx_hashes,
|
||||||
|
queue=queue,
|
||||||
)
|
)
|
||||||
|
|
||||||
s_unlock_send = celery.signature(
|
s_unlock_send = celery.signature(
|
||||||
'cic_eth.admin.ctrl.unlock_send',
|
'cic_eth.admin.ctrl.unlock_send',
|
||||||
[
|
[
|
||||||
chain_str,
|
chain_spec.asdict(),
|
||||||
tx_new['from'],
|
address,
|
||||||
|
#tx_new['from'],
|
||||||
],
|
],
|
||||||
queue=queue,
|
queue=queue,
|
||||||
)
|
)
|
||||||
s_unlock_direct = celery.signature(
|
s_unlock_direct = celery.signature(
|
||||||
'cic_eth.admin.ctrl.unlock_queue',
|
'cic_eth.admin.ctrl.unlock_queue',
|
||||||
[
|
[
|
||||||
chain_str,
|
chain_spec.asdict(),
|
||||||
tx_new['from'],
|
address,
|
||||||
|
#tx_new['from'],
|
||||||
],
|
],
|
||||||
queue=queue,
|
queue=queue,
|
||||||
)
|
)
|
||||||
|
|||||||
21
apps/cic-eth/cic_eth/admin/token.py
Normal file
21
apps/cic-eth/cic_eth/admin/token.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.task import BaseTask
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, base=BaseTask)
|
||||||
|
def default_token(self):
|
||||||
|
return {
|
||||||
|
'symbol': self.default_token_symbol,
|
||||||
|
'address': self.default_token_address,
|
||||||
|
'name': self.default_token_name,
|
||||||
|
'decimals': self.default_token_decimals,
|
||||||
|
}
|
||||||
@@ -5,4 +5,3 @@
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
from .api_task import Api
|
from .api_task import Api
|
||||||
from .api_admin import AdminApi
|
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ from chainlib.eth.constant import (
|
|||||||
ZERO_ADDRESS,
|
ZERO_ADDRESS,
|
||||||
)
|
)
|
||||||
from cic_eth_registry import CICRegistry
|
from cic_eth_registry import CICRegistry
|
||||||
|
from cic_eth_registry.erc20 import ERC20Token
|
||||||
from cic_eth_registry.error import UnknownContractError
|
from cic_eth_registry.error import UnknownContractError
|
||||||
from chainlib.eth.address import to_checksum_address
|
from chainlib.eth.address import to_checksum_address
|
||||||
from chainlib.eth.contract import code
|
from chainlib.eth.contract import code
|
||||||
@@ -20,25 +21,25 @@ from chainlib.hash import keccak256_hex_to_hex
|
|||||||
from hexathon import (
|
from hexathon import (
|
||||||
strip_0x,
|
strip_0x,
|
||||||
add_0x,
|
add_0x,
|
||||||
|
uniform as hex_uniform,
|
||||||
)
|
)
|
||||||
from chainlib.eth.gas import balance
|
from chainlib.eth.gas import balance
|
||||||
|
from chainqueue.db.enum import (
|
||||||
# local imports
|
|
||||||
from cic_eth.db.models.base import SessionBase
|
|
||||||
from cic_eth.db.models.role import AccountRole
|
|
||||||
from cic_eth.db.models.otx import Otx
|
|
||||||
from cic_eth.db.models.tx import TxCache
|
|
||||||
from cic_eth.db.models.nonce import Nonce
|
|
||||||
from cic_eth.db.enum import (
|
|
||||||
StatusEnum,
|
StatusEnum,
|
||||||
StatusBits,
|
StatusBits,
|
||||||
is_alive,
|
is_alive,
|
||||||
is_error_status,
|
is_error_status,
|
||||||
status_str,
|
status_str,
|
||||||
)
|
)
|
||||||
|
from chainqueue.error import TxStateChangeError
|
||||||
|
from eth_erc20 import ERC20
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
from cic_eth.db.models.role import AccountRole
|
||||||
|
from cic_eth.db.models.nonce import Nonce
|
||||||
from cic_eth.error import InitializationError
|
from cic_eth.error import InitializationError
|
||||||
from cic_eth.db.error import TxStateChangeError
|
from cic_eth.queue.query import get_tx_local
|
||||||
from cic_eth.queue.tx import get_tx
|
|
||||||
|
|
||||||
app = celery.current_app
|
app = celery.current_app
|
||||||
|
|
||||||
@@ -62,6 +63,29 @@ class AdminApi:
|
|||||||
self.call_address = call_address
|
self.call_address = call_address
|
||||||
|
|
||||||
|
|
||||||
|
def proxy_do(self, chain_spec, o):
|
||||||
|
s_proxy = celery.signature(
|
||||||
|
'cic_eth.task.rpc_proxy',
|
||||||
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
|
o,
|
||||||
|
'default',
|
||||||
|
],
|
||||||
|
queue=self.queue
|
||||||
|
)
|
||||||
|
return s_proxy.apply_async()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def registry(self):
|
||||||
|
s_registry = celery.signature(
|
||||||
|
'cic_eth.task.registry',
|
||||||
|
[],
|
||||||
|
queue=self.queue
|
||||||
|
)
|
||||||
|
return s_registry.apply_async()
|
||||||
|
|
||||||
|
|
||||||
def unlock(self, chain_spec, address, flags=None):
|
def unlock(self, chain_spec, address, flags=None):
|
||||||
s_unlock = celery.signature(
|
s_unlock = celery.signature(
|
||||||
'cic_eth.admin.ctrl.unlock',
|
'cic_eth.admin.ctrl.unlock',
|
||||||
@@ -92,7 +116,7 @@ class AdminApi:
|
|||||||
|
|
||||||
def get_lock(self):
|
def get_lock(self):
|
||||||
s_lock = celery.signature(
|
s_lock = celery.signature(
|
||||||
'cic_eth.queue.tx.get_lock',
|
'cic_eth.queue.lock.get_lock',
|
||||||
[],
|
[],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
@@ -134,11 +158,13 @@ class AdminApi:
|
|||||||
return s_have.apply_async()
|
return s_have.apply_async()
|
||||||
|
|
||||||
|
|
||||||
def resend(self, tx_hash_hex, chain_str, in_place=True, unlock=False):
|
def resend(self, tx_hash_hex, chain_spec, in_place=True, unlock=False):
|
||||||
|
|
||||||
logg.debug('resend {}'.format(tx_hash_hex))
|
logg.debug('resend {}'.format(tx_hash_hex))
|
||||||
s_get_tx_cache = celery.signature(
|
s_get_tx_cache = celery.signature(
|
||||||
'cic_eth.queue.tx.get_tx_cache',
|
'cic_eth.queue.query.get_tx_cache',
|
||||||
[
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
tx_hash_hex,
|
tx_hash_hex,
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
@@ -146,7 +172,6 @@ class AdminApi:
|
|||||||
|
|
||||||
# TODO: This check should most likely be in resend task itself
|
# TODO: This check should most likely be in resend task itself
|
||||||
tx_dict = s_get_tx_cache.apply_async().get()
|
tx_dict = s_get_tx_cache.apply_async().get()
|
||||||
#if tx_dict['status'] in [StatusEnum.REVERTED, StatusEnum.SUCCESS, StatusEnum.CANCELLED, StatusEnum.OBSOLETED]:
|
|
||||||
if not is_alive(getattr(StatusEnum, tx_dict['status']).value):
|
if not is_alive(getattr(StatusEnum, tx_dict['status']).value):
|
||||||
raise TxStateChangeError('Cannot resend mined or obsoleted transaction'.format(txold_hash_hex))
|
raise TxStateChangeError('Cannot resend mined or obsoleted transaction'.format(txold_hash_hex))
|
||||||
|
|
||||||
@@ -154,9 +179,9 @@ class AdminApi:
|
|||||||
raise NotImplementedError('resend as new not yet implemented')
|
raise NotImplementedError('resend as new not yet implemented')
|
||||||
|
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.eth.tx.resend_with_higher_gas',
|
'cic_eth.eth.gas.resend_with_higher_gas',
|
||||||
[
|
[
|
||||||
chain_str,
|
chain_spec.asdict(),
|
||||||
None,
|
None,
|
||||||
1.01,
|
1.01,
|
||||||
],
|
],
|
||||||
@@ -164,8 +189,9 @@ class AdminApi:
|
|||||||
)
|
)
|
||||||
|
|
||||||
s_manual = celery.signature(
|
s_manual = celery.signature(
|
||||||
'cic_eth.queue.tx.set_manual',
|
'cic_eth.queue.state.set_manual',
|
||||||
[
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
tx_hash_hex,
|
tx_hash_hex,
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
@@ -176,7 +202,7 @@ class AdminApi:
|
|||||||
s_gas = celery.signature(
|
s_gas = celery.signature(
|
||||||
'cic_eth.admin.ctrl.unlock_send',
|
'cic_eth.admin.ctrl.unlock_send',
|
||||||
[
|
[
|
||||||
chain_str,
|
chain_spec.asdict(),
|
||||||
tx_dict['sender'],
|
tx_dict['sender'],
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
@@ -184,11 +210,13 @@ class AdminApi:
|
|||||||
s.link(s_gas)
|
s.link(s_gas)
|
||||||
|
|
||||||
return s_manual.apply_async()
|
return s_manual.apply_async()
|
||||||
|
|
||||||
def check_nonce(self, address):
|
|
||||||
|
def check_nonce(self, chain_spec, address):
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.queue.tx.get_account_tx',
|
'cic_eth.queue.query.get_account_tx',
|
||||||
[
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
address,
|
address,
|
||||||
True,
|
True,
|
||||||
False,
|
False,
|
||||||
@@ -203,14 +231,14 @@ class AdminApi:
|
|||||||
last_nonce = -1
|
last_nonce = -1
|
||||||
for k in txs.keys():
|
for k in txs.keys():
|
||||||
s_get_tx = celery.signature(
|
s_get_tx = celery.signature(
|
||||||
'cic_eth.queue.tx.get_tx',
|
'cic_eth.queue.query.get_tx',
|
||||||
[
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
k,
|
k,
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
tx = s_get_tx.apply_async().get()
|
tx = s_get_tx.apply_async().get()
|
||||||
#tx = get_tx(k)
|
|
||||||
logg.debug('checking nonce {} (previous {})'.format(tx['nonce'], last_nonce))
|
logg.debug('checking nonce {} (previous {})'.format(tx['nonce'], last_nonce))
|
||||||
nonce_otx = tx['nonce']
|
nonce_otx = tx['nonce']
|
||||||
if not is_alive(tx['status']) and tx['status'] & local_fail > 0:
|
if not is_alive(tx['status']) and tx['status'] & local_fail > 0:
|
||||||
@@ -218,15 +246,14 @@ class AdminApi:
|
|||||||
blocking_tx = k
|
blocking_tx = k
|
||||||
blocking_nonce = nonce_otx
|
blocking_nonce = nonce_otx
|
||||||
elif nonce_otx - last_nonce > 1:
|
elif nonce_otx - last_nonce > 1:
|
||||||
logg.error('nonce gap; {} followed {}'.format(nonce_otx, last_nonce))
|
logg.debug('tx {}'.format(tx))
|
||||||
|
tx_obj = unpack(bytes.fromhex(strip_0x(tx['signed_tx'])), chain_spec)
|
||||||
|
logg.error('nonce gap; {} followed {} for account {}'.format(nonce_otx, last_nonce, tx_obj['from']))
|
||||||
blocking_tx = k
|
blocking_tx = k
|
||||||
blocking_nonce = nonce_otx
|
blocking_nonce = nonce_otx
|
||||||
break
|
break
|
||||||
last_nonce = nonce_otx
|
last_nonce = nonce_otx
|
||||||
|
|
||||||
#nonce_cache = Nonce.get(address)
|
|
||||||
#nonce_w3 = self.w3.eth.getTransactionCount(address, 'pending')
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'nonce': {
|
'nonce': {
|
||||||
#'network': nonce_cache,
|
#'network': nonce_cache,
|
||||||
@@ -235,15 +262,17 @@ class AdminApi:
|
|||||||
'blocking': blocking_nonce,
|
'blocking': blocking_nonce,
|
||||||
},
|
},
|
||||||
'tx': {
|
'tx': {
|
||||||
'blocking': blocking_tx,
|
'blocking': add_0x(blocking_tx),
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def fix_nonce(self, address, nonce, chain_spec):
|
# TODO: is risky since it does not validate that there is actually a nonce problem?
|
||||||
|
def fix_nonce(self, chain_spec, address, nonce):
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.queue.tx.get_account_tx',
|
'cic_eth.queue.query.get_account_tx',
|
||||||
[
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
address,
|
address,
|
||||||
True,
|
True,
|
||||||
False,
|
False,
|
||||||
@@ -253,15 +282,17 @@ class AdminApi:
|
|||||||
txs = s.apply_async().get()
|
txs = s.apply_async().get()
|
||||||
|
|
||||||
tx_hash_hex = None
|
tx_hash_hex = None
|
||||||
|
session = SessionBase.create_session()
|
||||||
for k in txs.keys():
|
for k in txs.keys():
|
||||||
tx_dict = get_tx(k)
|
tx_dict = get_tx_local(chain_spec, k, session=session)
|
||||||
if tx_dict['nonce'] == nonce:
|
if tx_dict['nonce'] == nonce:
|
||||||
tx_hash_hex = k
|
tx_hash_hex = k
|
||||||
|
session.close()
|
||||||
|
|
||||||
s_nonce = celery.signature(
|
s_nonce = celery.signature(
|
||||||
'cic_eth.admin.nonce.shift_nonce',
|
'cic_eth.admin.nonce.shift_nonce',
|
||||||
[
|
[
|
||||||
self.rpc.chain_spec.asdict(),
|
chain_spec.asdict(),
|
||||||
tx_hash_hex,
|
tx_hash_hex,
|
||||||
],
|
],
|
||||||
queue=self.queue
|
queue=self.queue
|
||||||
@@ -269,20 +300,6 @@ class AdminApi:
|
|||||||
return s_nonce.apply_async()
|
return s_nonce.apply_async()
|
||||||
|
|
||||||
|
|
||||||
# # TODO: this is a stub, complete all checks
|
|
||||||
# def ready(self):
|
|
||||||
# """Checks whether all required initializations have been performed.
|
|
||||||
#
|
|
||||||
# :raises cic_eth.error.InitializationError: At least one setting pre-requisite has not been met.
|
|
||||||
# :raises KeyError: An address provided for initialization is not known by the keystore.
|
|
||||||
# """
|
|
||||||
# addr = AccountRole.get_address('ETH_GAS_PROVIDER_ADDRESS')
|
|
||||||
# if addr == ZERO_ADDRESS:
|
|
||||||
# raise InitializationError('missing account ETH_GAS_PROVIDER_ADDRESS')
|
|
||||||
#
|
|
||||||
# self.w3.eth.sign(addr, text='666f6f')
|
|
||||||
|
|
||||||
|
|
||||||
def account(self, chain_spec, address, include_sender=True, include_recipient=True, renderer=None, w=sys.stdout):
|
def account(self, chain_spec, address, include_sender=True, include_recipient=True, renderer=None, w=sys.stdout):
|
||||||
"""Lists locally originated transactions for the given Ethereum address.
|
"""Lists locally originated transactions for the given Ethereum address.
|
||||||
|
|
||||||
@@ -291,10 +308,13 @@ class AdminApi:
|
|||||||
:param address: Ethereum address to return transactions for
|
:param address: Ethereum address to return transactions for
|
||||||
:type address: str, 0x-hex
|
:type address: str, 0x-hex
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
address = add_0x(hex_uniform(strip_0x(address)))
|
||||||
last_nonce = -1
|
last_nonce = -1
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.queue.tx.get_account_tx',
|
'cic_eth.queue.query.get_account_tx',
|
||||||
[
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
address,
|
address,
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
@@ -305,17 +325,20 @@ class AdminApi:
|
|||||||
for tx_hash in txs.keys():
|
for tx_hash in txs.keys():
|
||||||
errors = []
|
errors = []
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.queue.tx.get_tx_cache',
|
'cic_eth.queue.query.get_tx_cache',
|
||||||
[tx_hash],
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
|
tx_hash,
|
||||||
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
tx_dict = s.apply_async().get()
|
tx_dict = s.apply_async().get()
|
||||||
if tx_dict['sender'] == address:
|
if tx_dict['sender'] == address:
|
||||||
if tx_dict['nonce'] - last_nonce > 1:
|
if tx_dict['nonce'] - last_nonce > 1:
|
||||||
logg.error('nonce gap; {} followed {} for tx {}'.format(tx_dict['nonce'], last_nonce, tx_dict['hash']))
|
logg.error('nonce gap; {} followed {} for address {} tx {}'.format(tx_dict['nonce'], last_nonce, tx_dict['sender'], tx_hash))
|
||||||
errors.append('nonce')
|
errors.append('nonce')
|
||||||
elif tx_dict['nonce'] == last_nonce:
|
elif tx_dict['nonce'] == last_nonce:
|
||||||
logg.warning('nonce {} duplicate in tx {}'.format(tx_dict['nonce'], tx_dict['hash']))
|
logg.info('nonce {} duplicate for address {} in tx {}'.format(tx_dict['nonce'], tx_dict['sender'], tx_hash))
|
||||||
last_nonce = tx_dict['nonce']
|
last_nonce = tx_dict['nonce']
|
||||||
if not include_sender:
|
if not include_sender:
|
||||||
logg.debug('skipping sender tx {}'.format(tx_dict['tx_hash']))
|
logg.debug('skipping sender tx {}'.format(tx_dict['tx_hash']))
|
||||||
@@ -341,6 +364,7 @@ class AdminApi:
|
|||||||
|
|
||||||
|
|
||||||
# TODO: Add exception upon non-existent tx aswell as invalid tx data to docstring
|
# TODO: Add exception upon non-existent tx aswell as invalid tx data to docstring
|
||||||
|
# TODO: This method is WAY too long
|
||||||
def tx(self, chain_spec, tx_hash=None, tx_raw=None, registry=None, renderer=None, w=sys.stdout):
|
def tx(self, chain_spec, tx_hash=None, tx_raw=None, registry=None, renderer=None, w=sys.stdout):
|
||||||
"""Output local and network details about a given transaction with local origin.
|
"""Output local and network details about a given transaction with local origin.
|
||||||
|
|
||||||
@@ -363,50 +387,99 @@ class AdminApi:
|
|||||||
|
|
||||||
if tx_raw != None:
|
if tx_raw != None:
|
||||||
tx_hash = add_0x(keccak256_hex_to_hex(tx_raw))
|
tx_hash = add_0x(keccak256_hex_to_hex(tx_raw))
|
||||||
#tx_hash = self.w3.keccak(hexstr=tx_raw).hex()
|
|
||||||
|
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.queue.tx.get_tx_cache',
|
'cic_eth.queue.query.get_tx_cache',
|
||||||
[tx_hash],
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
|
tx_hash,
|
||||||
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
|
|
||||||
tx = s.apply_async().get()
|
t = s.apply_async()
|
||||||
|
tx = t.get()
|
||||||
|
|
||||||
source_token = None
|
source_token = None
|
||||||
if tx['source_token'] != ZERO_ADDRESS:
|
if tx['source_token'] != ZERO_ADDRESS:
|
||||||
try:
|
source_token_declaration = None
|
||||||
source_token = registry.by_address(tx['source_token'])
|
if registry != None:
|
||||||
#source_token = CICRegistry.get_address(chain_spec, tx['source_token']).contract
|
try:
|
||||||
except UnknownContractError:
|
source_token_declaration = registry.by_address(tx['source_token'], sender_address=self.call_address)
|
||||||
#source_token_contract = self.w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=tx['source_token'])
|
except UnknownContractError:
|
||||||
#source_token = CICRegistry.add_token(chain_spec, source_token_contract)
|
logg.warning('unknown source token contract {} (direct)'.format(tx['source_token']))
|
||||||
logg.warning('unknown source token contract {}'.format(tx['source_token']))
|
else:
|
||||||
|
s = celery.signature(
|
||||||
|
'cic_eth.task.registry_address_lookup',
|
||||||
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
|
tx['source_token'],
|
||||||
|
],
|
||||||
|
queue=self.queue
|
||||||
|
)
|
||||||
|
t = s.apply_async()
|
||||||
|
source_token_declaration = t.get()
|
||||||
|
|
||||||
|
if source_token_declaration != None:
|
||||||
|
logg.warning('found declarator record for source token {} but not checking validity'.format(tx['source_token']))
|
||||||
|
source_token = ERC20Token(chain_spec, self.rpc, tx['source_token'])
|
||||||
|
logg.debug('source token set tup {}'.format(source_token))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
destination_token = None
|
destination_token = None
|
||||||
if tx['source_token'] != ZERO_ADDRESS:
|
if tx['destination_token'] != ZERO_ADDRESS:
|
||||||
try:
|
destination_token_declaration = None
|
||||||
#destination_token = CICRegistry.get_address(chain_spec, tx['destination_token'])
|
if registry != None:
|
||||||
destination_token = registry.by_address(tx['destination_token'])
|
try:
|
||||||
except UnknownContractError:
|
destination_token_declaration = registry.by_address(tx['destination_token'], sender_address=self.call_address)
|
||||||
#destination_token_contract = self.w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=tx['source_token'])
|
except UnknownContractError:
|
||||||
#destination_token = CICRegistry.add_token(chain_spec, destination_token_contract)
|
logg.warning('unknown destination token contract {}'.format(tx['destination_token']))
|
||||||
logg.warning('unknown destination token contract {}'.format(tx['destination_token']))
|
else:
|
||||||
|
s = celery.signature(
|
||||||
|
'cic_eth.task.registry_address_lookup',
|
||||||
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
|
tx['destination_token'],
|
||||||
|
],
|
||||||
|
queue=self.queue
|
||||||
|
)
|
||||||
|
t = s.apply_async()
|
||||||
|
destination_token_declaration = t.get()
|
||||||
|
if destination_token_declaration != None:
|
||||||
|
logg.warning('found declarator record for destination token {} but not checking validity'.format(tx['destination_token']))
|
||||||
|
destination_token = ERC20Token(chain_spec, self.rpc, tx['destination_token'])
|
||||||
|
|
||||||
tx['sender_description'] = 'Custodial account'
|
tx['sender_description'] = 'Custodial account'
|
||||||
tx['recipient_description'] = 'Custodial account'
|
tx['recipient_description'] = 'Custodial account'
|
||||||
|
|
||||||
o = code(tx['sender'])
|
o = code(tx['sender'])
|
||||||
r = self.rpc.do(o)
|
t = self.proxy_do(chain_spec, o)
|
||||||
|
r = t.get()
|
||||||
if len(strip_0x(r, allow_empty=True)) > 0:
|
if len(strip_0x(r, allow_empty=True)) > 0:
|
||||||
try:
|
if registry != None:
|
||||||
#sender_contract = CICRegistry.get_address(chain_spec, tx['sender'])
|
try:
|
||||||
sender_contract = registry.by_address(tx['sender'], sender_address=self.call_address)
|
sender_contract = registry.by_address(tx['sender'], sender_address=self.call_address)
|
||||||
tx['sender_description'] = 'Contract at {}'.format(tx['sender']) #sender_contract)
|
tx['sender_description'] = 'Contract at {}'.format(tx['sender'])
|
||||||
except UnknownContractError:
|
except UnknownContractError:
|
||||||
tx['sender_description'] = 'Unknown contract'
|
tx['sender_description'] = 'Unknown contract'
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
tx['sender_description'] = 'Unknown contract'
|
tx['sender_description'] = 'Unknown contract'
|
||||||
|
else:
|
||||||
|
s = celery.signature(
|
||||||
|
'cic_eth.task.registry_address_lookup',
|
||||||
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
|
tx['sender'],
|
||||||
|
],
|
||||||
|
queue=self.queue
|
||||||
|
)
|
||||||
|
t = s.apply_async()
|
||||||
|
tx['sender_description'] = t.get()
|
||||||
|
if tx['sender_description'] == None:
|
||||||
|
tx['sender_description'] = 'Unknown contract'
|
||||||
|
|
||||||
|
|
||||||
else:
|
else:
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.eth.account.have',
|
'cic_eth.eth.account.have',
|
||||||
@@ -435,16 +508,31 @@ class AdminApi:
|
|||||||
tx['sender_description'] = role
|
tx['sender_description'] = role
|
||||||
|
|
||||||
o = code(tx['recipient'])
|
o = code(tx['recipient'])
|
||||||
r = self.rpc.do(o)
|
t = self.proxy_do(chain_spec, o)
|
||||||
|
r = t.get()
|
||||||
if len(strip_0x(r, allow_empty=True)) > 0:
|
if len(strip_0x(r, allow_empty=True)) > 0:
|
||||||
try:
|
if registry != None:
|
||||||
#recipient_contract = CICRegistry.by_address(tx['recipient'])
|
try:
|
||||||
recipient_contract = registry.by_address(tx['recipient'])
|
recipient_contract = registry.by_address(tx['recipient'])
|
||||||
tx['recipient_description'] = 'Contract at {}'.format(tx['recipient']) #recipient_contract)
|
tx['recipient_description'] = 'Contract at {}'.format(tx['recipient'])
|
||||||
except UnknownContractError as e:
|
except UnknownContractError as e:
|
||||||
tx['recipient_description'] = 'Unknown contract'
|
tx['recipient_description'] = 'Unknown contract'
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
tx['recipient_description'] = 'Unknown contract'
|
tx['recipient_description'] = 'Unknown contract'
|
||||||
|
else:
|
||||||
|
s = celery.signature(
|
||||||
|
'cic_eth.task.registry_address_lookup',
|
||||||
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
|
tx['recipient'],
|
||||||
|
],
|
||||||
|
queue=self.queue
|
||||||
|
)
|
||||||
|
t = s.apply_async()
|
||||||
|
tx['recipient_description'] = t.get()
|
||||||
|
if tx['recipient_description'] == None:
|
||||||
|
tx['recipient_description'] = 'Unknown contract'
|
||||||
|
|
||||||
else:
|
else:
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.eth.account.have',
|
'cic_eth.eth.account.have',
|
||||||
@@ -472,28 +560,38 @@ class AdminApi:
|
|||||||
if role != None:
|
if role != None:
|
||||||
tx['recipient_description'] = role
|
tx['recipient_description'] = role
|
||||||
|
|
||||||
|
erc20_c = ERC20(chain_spec)
|
||||||
if source_token != None:
|
if source_token != None:
|
||||||
tx['source_token_symbol'] = source_token.symbol()
|
tx['source_token_symbol'] = source_token.symbol
|
||||||
tx['sender_token_balance'] = source_token.function('balanceOf')(tx['sender']).call()
|
o = erc20_c.balance_of(tx['source_token'], tx['sender'], sender_address=self.call_address)
|
||||||
|
r = self.rpc.do(o)
|
||||||
|
tx['sender_token_balance'] = erc20_c.parse_balance(r)
|
||||||
|
|
||||||
if destination_token != None:
|
if destination_token != None:
|
||||||
tx['destination_token_symbol'] = destination_token.symbol()
|
tx['destination_token_symbol'] = destination_token.symbol
|
||||||
tx['recipient_token_balance'] = source_token.function('balanceOf')(tx['recipient']).call()
|
o = erc20_c.balance_of(tx['destination_token'], tx['recipient'], sender_address=self.call_address)
|
||||||
|
r = self.rpc.do(o)
|
||||||
|
tx['recipient_token_balance'] = erc20_c.parse_balance(r)
|
||||||
|
#tx['recipient_token_balance'] = destination_token.function('balanceOf')(tx['recipient']).call()
|
||||||
|
|
||||||
tx['network_status'] = 'Not submitted'
|
# TODO: this can mean either not subitted or culled, need to check other txs with same nonce to determine which
|
||||||
|
tx['network_status'] = 'Not in node'
|
||||||
|
|
||||||
r = None
|
r = None
|
||||||
try:
|
try:
|
||||||
o = transaction(tx_hash)
|
o = transaction(tx_hash)
|
||||||
r = self.rpc.do(o)
|
t = self.proxy_do(chain_spec, o)
|
||||||
|
r = t.get()
|
||||||
|
if r != None:
|
||||||
|
tx['network_status'] = 'Mempool'
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logg.warning('(too permissive exception handler, please fix!) {}'.format(e))
|
logg.warning('(too permissive exception handler, please fix!) {}'.format(e))
|
||||||
tx['network_status'] = 'Mempool'
|
|
||||||
|
|
||||||
if r != None:
|
if r != None:
|
||||||
try:
|
try:
|
||||||
o = receipt(tx_hash)
|
o = receipt(tx_hash)
|
||||||
r = self.rpc.do(o)
|
t = self.proxy_do(chain_spec, o)
|
||||||
|
r = t.get()
|
||||||
logg.debug('h {} o {}'.format(tx_hash, o))
|
logg.debug('h {} o {}'.format(tx_hash, o))
|
||||||
if int(strip_0x(r['status'])) == 1:
|
if int(strip_0x(r['status'])) == 1:
|
||||||
tx['network_status'] = 'Confirmed'
|
tx['network_status'] = 'Confirmed'
|
||||||
@@ -508,21 +606,24 @@ class AdminApi:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
o = balance(tx['sender'])
|
o = balance(tx['sender'])
|
||||||
r = self.rpc.do(o)
|
t = self.proxy_do(chain_spec, o)
|
||||||
|
r = t.get()
|
||||||
tx['sender_gas_balance'] = r
|
tx['sender_gas_balance'] = r
|
||||||
|
|
||||||
o = balance(tx['recipient'])
|
o = balance(tx['recipient'])
|
||||||
r = self.rpc.do(o)
|
t = self.proxy_do(chain_spec, o)
|
||||||
|
r = t.get()
|
||||||
tx['recipient_gas_balance'] = r
|
tx['recipient_gas_balance'] = r
|
||||||
|
|
||||||
tx_unpacked = unpack(bytes.fromhex(tx['signed_tx'][2:]), chain_spec.chain_id())
|
tx_unpacked = unpack(bytes.fromhex(strip_0x(tx['signed_tx'])), chain_spec)
|
||||||
tx['gas_price'] = tx_unpacked['gasPrice']
|
tx['gas_price'] = tx_unpacked['gasPrice']
|
||||||
tx['gas_limit'] = tx_unpacked['gas']
|
tx['gas_limit'] = tx_unpacked['gas']
|
||||||
tx['data'] = tx_unpacked['data']
|
tx['data'] = tx_unpacked['data']
|
||||||
|
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.queue.tx.get_state_log',
|
'cic_eth.queue.state.get_state_log',
|
||||||
[
|
[
|
||||||
|
chain_spec.asdict(),
|
||||||
tx_hash,
|
tx_hash,
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
@@ -8,83 +8,182 @@ import logging
|
|||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
from cic_eth_registry import CICRegistry
|
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.db.enum import LockEnum
|
from cic_eth.api.base import ApiBase
|
||||||
|
from cic_eth.enum import LockEnum
|
||||||
|
|
||||||
app = celery.current_app
|
app = celery.current_app
|
||||||
|
|
||||||
logg = logging.getLogger(__name__)
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Api:
|
class Api(ApiBase):
|
||||||
"""Creates task chains to perform well-known CIC operations.
|
|
||||||
|
|
||||||
Each method that sends tasks returns details about the root task. The root task uuid can be provided in the callback, to enable to caller to correlate the result with individual calls. It can also be used to independently poll the completion of a task chain.
|
def default_token(self):
|
||||||
|
s_token = celery.signature(
|
||||||
|
'cic_eth.admin.token.default_token',
|
||||||
|
[],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
if self.callback_param != None:
|
||||||
|
s_token.link(self.callback_success)
|
||||||
|
|
||||||
:param callback_param: Static value to pass to callback
|
return s_token.apply_async()
|
||||||
:type callback_param: str
|
|
||||||
:param callback_task: Callback task that executes callback_param call. (Must be included by the celery worker)
|
|
||||||
:type callback_task: string
|
|
||||||
:param queue: Name of worker queue to submit tasks to
|
|
||||||
:type queue: str
|
|
||||||
"""
|
|
||||||
def __init__(self, chain_str, queue='cic-eth', callback_param=None, callback_task='cic_eth.callbacks.noop.noop', callback_queue=None):
|
|
||||||
self.chain_str = chain_str
|
|
||||||
self.chain_spec = ChainSpec.from_chain_str(chain_str)
|
|
||||||
self.callback_param = callback_param
|
|
||||||
self.callback_task = callback_task
|
|
||||||
self.queue = queue
|
|
||||||
logg.info('api using queue {}'.format(self.queue))
|
|
||||||
self.callback_success = None
|
|
||||||
self.callback_error = None
|
|
||||||
if callback_queue == None:
|
|
||||||
callback_queue=self.queue
|
|
||||||
|
|
||||||
if callback_param != None:
|
|
||||||
self.callback_success = celery.signature(
|
|
||||||
callback_task,
|
|
||||||
[
|
|
||||||
callback_param,
|
|
||||||
0,
|
|
||||||
],
|
|
||||||
queue=callback_queue,
|
|
||||||
)
|
|
||||||
self.callback_error = celery.signature(
|
|
||||||
callback_task,
|
|
||||||
[
|
|
||||||
callback_param,
|
|
||||||
1,
|
|
||||||
],
|
|
||||||
queue=callback_queue,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def convert_transfer(self, from_address, to_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
|
# def convert_transfer(self, from_address, to_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
|
||||||
"""Executes a chain of celery tasks that performs conversion between two ERC20 tokens, and transfers to a specified receipient after convert has completed.
|
# """Executes a chain of celery tasks that performs conversion between two ERC20 tokens, and transfers to a specified receipient after convert has completed.
|
||||||
|
#
|
||||||
|
# :param from_address: Ethereum address of sender
|
||||||
|
# :type from_address: str, 0x-hex
|
||||||
|
# :param to_address: Ethereum address of receipient
|
||||||
|
# :type to_address: str, 0x-hex
|
||||||
|
# :param target_return: Estimated return from conversion
|
||||||
|
# :type target_return: int
|
||||||
|
# :param minimum_return: The least value of destination token return to allow
|
||||||
|
# :type minimum_return: int
|
||||||
|
# :param from_token_symbol: ERC20 token symbol of token being converted
|
||||||
|
# :type from_token_symbol: str
|
||||||
|
# :param to_token_symbol: ERC20 token symbol of token to receive
|
||||||
|
# :type to_token_symbol: str
|
||||||
|
# :returns: uuid of root task
|
||||||
|
# :rtype: celery.Task
|
||||||
|
# """
|
||||||
|
# raise NotImplementedError('out of service until new DEX migration is done')
|
||||||
|
# s_check = celery.signature(
|
||||||
|
# 'cic_eth.admin.ctrl.check_lock',
|
||||||
|
# [
|
||||||
|
# [from_token_symbol, to_token_symbol],
|
||||||
|
# self.chain_spec.asdict(),
|
||||||
|
# LockEnum.QUEUE,
|
||||||
|
# from_address,
|
||||||
|
# ],
|
||||||
|
# queue=self.queue,
|
||||||
|
# )
|
||||||
|
# s_nonce = celery.signature(
|
||||||
|
# 'cic_eth.eth.nonce.reserve_nonce',
|
||||||
|
# [
|
||||||
|
# self.chain_spec.asdict(),
|
||||||
|
# ],
|
||||||
|
# queue=self.queue,
|
||||||
|
# )
|
||||||
|
# s_tokens = celery.signature(
|
||||||
|
# 'cic_eth.eth.erc20.resolve_tokens_by_symbol',
|
||||||
|
# [
|
||||||
|
# self.chain_str,
|
||||||
|
# ],
|
||||||
|
# queue=self.queue,
|
||||||
|
# )
|
||||||
|
# s_convert = celery.signature(
|
||||||
|
# 'cic_eth.eth.bancor.convert_with_default_reserve',
|
||||||
|
# [
|
||||||
|
# from_address,
|
||||||
|
# target_return,
|
||||||
|
# minimum_return,
|
||||||
|
# to_address,
|
||||||
|
# self.chain_spec.asdict(),
|
||||||
|
# ],
|
||||||
|
# queue=self.queue,
|
||||||
|
# )
|
||||||
|
# s_nonce.link(s_tokens)
|
||||||
|
# s_check.link(s_nonce)
|
||||||
|
# if self.callback_param != None:
|
||||||
|
# s_convert.link(self.callback_success)
|
||||||
|
# s_tokens.link(s_convert).on_error(self.callback_error)
|
||||||
|
# else:
|
||||||
|
# s_tokens.link(s_convert)
|
||||||
|
#
|
||||||
|
# t = s_check.apply_async(queue=self.queue)
|
||||||
|
# return t
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def convert(self, from_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
|
||||||
|
# """Executes a chain of celery tasks that performs conversion between two ERC20 tokens.
|
||||||
|
#
|
||||||
|
# :param from_address: Ethereum address of sender
|
||||||
|
# :type from_address: str, 0x-hex
|
||||||
|
# :param target_return: Estimated return from conversion
|
||||||
|
# :type target_return: int
|
||||||
|
# :param minimum_return: The least value of destination token return to allow
|
||||||
|
# :type minimum_return: int
|
||||||
|
# :param from_token_symbol: ERC20 token symbol of token being converted
|
||||||
|
# :type from_token_symbol: str
|
||||||
|
# :param to_token_symbol: ERC20 token symbol of token to receive
|
||||||
|
# :type to_token_symbol: str
|
||||||
|
# :returns: uuid of root task
|
||||||
|
# :rtype: celery.Task
|
||||||
|
# """
|
||||||
|
# raise NotImplementedError('out of service until new DEX migration is done')
|
||||||
|
# s_check = celery.signature(
|
||||||
|
# 'cic_eth.admin.ctrl.check_lock',
|
||||||
|
# [
|
||||||
|
# [from_token_symbol, to_token_symbol],
|
||||||
|
# self.chain_spec.asdict(),
|
||||||
|
# LockEnum.QUEUE,
|
||||||
|
# from_address,
|
||||||
|
# ],
|
||||||
|
# queue=self.queue,
|
||||||
|
# )
|
||||||
|
# s_nonce = celery.signature(
|
||||||
|
# 'cic_eth.eth.nonce.reserve_nonce',
|
||||||
|
# [
|
||||||
|
# self.chain_spec.asdict(),
|
||||||
|
# ],
|
||||||
|
# queue=self.queue,
|
||||||
|
# )
|
||||||
|
# s_tokens = celery.signature(
|
||||||
|
# 'cic_eth.eth.erc20.resolve_tokens_by_symbol',
|
||||||
|
# [
|
||||||
|
# self.chain_spec.asdict(),
|
||||||
|
# ],
|
||||||
|
# queue=self.queue,
|
||||||
|
# )
|
||||||
|
# s_convert = celery.signature(
|
||||||
|
# 'cic_eth.eth.bancor.convert_with_default_reserve',
|
||||||
|
# [
|
||||||
|
# from_address,
|
||||||
|
# target_return,
|
||||||
|
# minimum_return,
|
||||||
|
# from_address,
|
||||||
|
# self.chain_spec.asdict(),
|
||||||
|
# ],
|
||||||
|
# queue=self.queue,
|
||||||
|
# )
|
||||||
|
# s_nonce.link(s_tokens)
|
||||||
|
# s_check.link(s_nonce)
|
||||||
|
# if self.callback_param != None:
|
||||||
|
# s_convert.link(self.callback_success)
|
||||||
|
# s_tokens.link(s_convert).on_error(self.callback_error)
|
||||||
|
# else:
|
||||||
|
# s_tokens.link(s_convert)
|
||||||
|
#
|
||||||
|
# t = s_check.apply_async(queue=self.queue)
|
||||||
|
# return t
|
||||||
|
|
||||||
|
|
||||||
|
def transfer_from(self, from_address, to_address, value, token_symbol, spender_address):
|
||||||
|
"""Executes a chain of celery tasks that performs a transfer of ERC20 tokens by one address on behalf of another address to a third party.
|
||||||
|
|
||||||
:param from_address: Ethereum address of sender
|
:param from_address: Ethereum address of sender
|
||||||
:type from_address: str, 0x-hex
|
:type from_address: str, 0x-hex
|
||||||
:param to_address: Ethereum address of receipient
|
:param to_address: Ethereum address of recipient
|
||||||
:type to_address: str, 0x-hex
|
:type to_address: str, 0x-hex
|
||||||
:param target_return: Estimated return from conversion
|
:param value: Estimated return from conversion
|
||||||
:type target_return: int
|
:type value: int
|
||||||
:param minimum_return: The least value of destination token return to allow
|
:param token_symbol: ERC20 token symbol of token to send
|
||||||
:type minimum_return: int
|
:type token_symbol: str
|
||||||
:param from_token_symbol: ERC20 token symbol of token being converted
|
:param spender_address: Ethereum address of recipient
|
||||||
:type from_token_symbol: str
|
:type spender_address: str, 0x-hex
|
||||||
:param to_token_symbol: ERC20 token symbol of token to receive
|
|
||||||
:type to_token_symbol: str
|
|
||||||
:returns: uuid of root task
|
:returns: uuid of root task
|
||||||
:rtype: celery.Task
|
:rtype: celery.Task
|
||||||
"""
|
"""
|
||||||
raise NotImplementedError('out of service until new DEX migration is done')
|
|
||||||
s_check = celery.signature(
|
s_check = celery.signature(
|
||||||
'cic_eth.admin.ctrl.check_lock',
|
'cic_eth.admin.ctrl.check_lock',
|
||||||
[
|
[
|
||||||
[from_token_symbol, to_token_symbol],
|
[token_symbol],
|
||||||
self.chain_spec.asdict(),
|
self.chain_spec.asdict(),
|
||||||
LockEnum.QUEUE,
|
LockEnum.QUEUE,
|
||||||
from_address,
|
from_address,
|
||||||
@@ -92,101 +191,54 @@ class Api:
|
|||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
s_nonce = celery.signature(
|
s_nonce = celery.signature(
|
||||||
'cic_eth.eth.tx.reserve_nonce',
|
'cic_eth.eth.nonce.reserve_nonce',
|
||||||
[],
|
[
|
||||||
|
self.chain_spec.asdict(),
|
||||||
|
from_address,
|
||||||
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
s_tokens = celery.signature(
|
s_tokens = celery.signature(
|
||||||
'cic_eth.eth.erc20.resolve_tokens_by_symbol',
|
'cic_eth.eth.erc20.resolve_tokens_by_symbol',
|
||||||
[
|
[
|
||||||
self.chain_str,
|
self.chain_spec.asdict(),
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
s_convert = celery.signature(
|
s_allow = celery.signature(
|
||||||
'cic_eth.eth.bancor.convert_with_default_reserve',
|
'cic_eth.eth.erc20.check_allowance',
|
||||||
|
[
|
||||||
|
from_address,
|
||||||
|
value,
|
||||||
|
self.chain_spec.asdict(),
|
||||||
|
spender_address,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_transfer = celery.signature(
|
||||||
|
'cic_eth.eth.erc20.transfer_from',
|
||||||
[
|
[
|
||||||
from_address,
|
from_address,
|
||||||
target_return,
|
|
||||||
minimum_return,
|
|
||||||
to_address,
|
to_address,
|
||||||
|
value,
|
||||||
self.chain_spec.asdict(),
|
self.chain_spec.asdict(),
|
||||||
|
spender_address,
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
|
s_tokens.link(s_allow)
|
||||||
s_nonce.link(s_tokens)
|
s_nonce.link(s_tokens)
|
||||||
s_check.link(s_nonce)
|
s_check.link(s_nonce)
|
||||||
if self.callback_param != None:
|
if self.callback_param != None:
|
||||||
s_convert.link(self.callback_success)
|
s_transfer.link(self.callback_success)
|
||||||
s_tokens.link(s_convert).on_error(self.callback_error)
|
s_allow.link(s_transfer).on_error(self.callback_error)
|
||||||
else:
|
else:
|
||||||
s_tokens.link(s_convert)
|
s_allow.link(s_transfer)
|
||||||
|
|
||||||
t = s_check.apply_async(queue=self.queue)
|
t = s_check.apply_async(queue=self.queue)
|
||||||
return t
|
return t
|
||||||
|
|
||||||
|
|
||||||
def convert(self, from_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
|
|
||||||
"""Executes a chain of celery tasks that performs conversion between two ERC20 tokens.
|
|
||||||
|
|
||||||
:param from_address: Ethereum address of sender
|
|
||||||
:type from_address: str, 0x-hex
|
|
||||||
:param target_return: Estimated return from conversion
|
|
||||||
:type target_return: int
|
|
||||||
:param minimum_return: The least value of destination token return to allow
|
|
||||||
:type minimum_return: int
|
|
||||||
:param from_token_symbol: ERC20 token symbol of token being converted
|
|
||||||
:type from_token_symbol: str
|
|
||||||
:param to_token_symbol: ERC20 token symbol of token to receive
|
|
||||||
:type to_token_symbol: str
|
|
||||||
:returns: uuid of root task
|
|
||||||
:rtype: celery.Task
|
|
||||||
"""
|
|
||||||
raise NotImplementedError('out of service until new DEX migration is done')
|
|
||||||
s_check = celery.signature(
|
|
||||||
'cic_eth.admin.ctrl.check_lock',
|
|
||||||
[
|
|
||||||
[from_token_symbol, to_token_symbol],
|
|
||||||
self.chain_spec.asdict(),
|
|
||||||
LockEnum.QUEUE,
|
|
||||||
from_address,
|
|
||||||
],
|
|
||||||
queue=self.queue,
|
|
||||||
)
|
|
||||||
s_nonce = celery.signature(
|
|
||||||
'cic_eth.eth.tx.reserve_nonce',
|
|
||||||
[],
|
|
||||||
queue=self.queue,
|
|
||||||
)
|
|
||||||
s_tokens = celery.signature(
|
|
||||||
'cic_eth.eth.erc20.resolve_tokens_by_symbol',
|
|
||||||
[
|
|
||||||
self.chain_spec.asdict(),
|
|
||||||
],
|
|
||||||
queue=self.queue,
|
|
||||||
)
|
|
||||||
s_convert = celery.signature(
|
|
||||||
'cic_eth.eth.bancor.convert_with_default_reserve',
|
|
||||||
[
|
|
||||||
from_address,
|
|
||||||
target_return,
|
|
||||||
minimum_return,
|
|
||||||
from_address,
|
|
||||||
self.chain_spec.asdict(),
|
|
||||||
],
|
|
||||||
queue=self.queue,
|
|
||||||
)
|
|
||||||
s_nonce.link(s_tokens)
|
|
||||||
s_check.link(s_nonce)
|
|
||||||
if self.callback_param != None:
|
|
||||||
s_convert.link(self.callback_success)
|
|
||||||
s_tokens.link(s_convert).on_error(self.callback_error)
|
|
||||||
else:
|
|
||||||
s_tokens.link(s_convert)
|
|
||||||
|
|
||||||
t = s_check.apply_async(queue=self.queue)
|
|
||||||
return t
|
|
||||||
|
|
||||||
|
|
||||||
def transfer(self, from_address, to_address, value, token_symbol):
|
def transfer(self, from_address, to_address, value, token_symbol):
|
||||||
"""Executes a chain of celery tasks that performs a transfer of ERC20 tokens from one address to another.
|
"""Executes a chain of celery tasks that performs a transfer of ERC20 tokens from one address to another.
|
||||||
@@ -213,8 +265,9 @@ class Api:
|
|||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
s_nonce = celery.signature(
|
s_nonce = celery.signature(
|
||||||
'cic_eth.eth.tx.reserve_nonce',
|
'cic_eth.eth.nonce.reserve_nonce',
|
||||||
[
|
[
|
||||||
|
self.chain_spec.asdict(),
|
||||||
from_address,
|
from_address,
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
@@ -359,8 +412,9 @@ class Api:
|
|||||||
|
|
||||||
if register:
|
if register:
|
||||||
s_nonce = celery.signature(
|
s_nonce = celery.signature(
|
||||||
'cic_eth.eth.tx.reserve_nonce',
|
'cic_eth.eth.nonce.reserve_nonce',
|
||||||
[
|
[
|
||||||
|
self.chain_spec.asdict(),
|
||||||
'ACCOUNT_REGISTRY_WRITER',
|
'ACCOUNT_REGISTRY_WRITER',
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
@@ -397,14 +451,15 @@ class Api:
|
|||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
s_nonce = celery.signature(
|
s_nonce = celery.signature(
|
||||||
'cic_eth.eth.tx.reserve_nonce',
|
'cic_eth.eth.nonce.reserve_nonce',
|
||||||
[
|
[
|
||||||
|
self.chain_spec.asdict(),
|
||||||
'GAS_GIFTER',
|
'GAS_GIFTER',
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
s_refill = celery.signature(
|
s_refill = celery.signature(
|
||||||
'cic_eth.eth.tx.refill_gas',
|
'cic_eth.eth.gas.refill_gas',
|
||||||
[
|
[
|
||||||
self.chain_spec.asdict(),
|
self.chain_spec.asdict(),
|
||||||
],
|
],
|
||||||
@@ -439,8 +494,9 @@ class Api:
|
|||||||
"""
|
"""
|
||||||
offset = 0
|
offset = 0
|
||||||
s_local = celery.signature(
|
s_local = celery.signature(
|
||||||
'cic_eth.queue.tx.get_account_tx',
|
'cic_eth.queue.query.get_account_tx',
|
||||||
[
|
[
|
||||||
|
self.chain_spec.asdict(),
|
||||||
address,
|
address,
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
@@ -464,9 +520,9 @@ class Api:
|
|||||||
s_external_get = celery.signature(
|
s_external_get = celery.signature(
|
||||||
external_task,
|
external_task,
|
||||||
[
|
[
|
||||||
address,
|
|
||||||
offset,
|
offset,
|
||||||
limit,
|
limit,
|
||||||
|
address,
|
||||||
],
|
],
|
||||||
queue=external_queue,
|
queue=external_queue,
|
||||||
)
|
)
|
||||||
|
|||||||
52
apps/cic-eth/cic_eth/api/base.py
Normal file
52
apps/cic-eth/cic_eth/api/base.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class ApiBase:
|
||||||
|
"""Creates task chains to perform well-known CIC operations.
|
||||||
|
|
||||||
|
Each method that sends tasks returns details about the root task. The root task uuid can be provided in the callback, to enable to caller to correlate the result with individual calls. It can also be used to independently poll the completion of a task chain.
|
||||||
|
|
||||||
|
:param callback_param: Static value to pass to callback
|
||||||
|
:type callback_param: str
|
||||||
|
:param callback_task: Callback task that executes callback_param call. (Must be included by the celery worker)
|
||||||
|
:type callback_task: string
|
||||||
|
:param queue: Name of worker queue to submit tasks to
|
||||||
|
:type queue: str
|
||||||
|
"""
|
||||||
|
def __init__(self, chain_str, queue='cic-eth', callback_param=None, callback_task='cic_eth.callbacks.noop.noop', callback_queue=None):
|
||||||
|
self.chain_str = chain_str
|
||||||
|
self.chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||||
|
self.callback_param = callback_param
|
||||||
|
self.callback_task = callback_task
|
||||||
|
self.queue = queue
|
||||||
|
logg.debug('api using queue {}'.format(self.queue))
|
||||||
|
self.callback_success = None
|
||||||
|
self.callback_error = None
|
||||||
|
if callback_queue == None:
|
||||||
|
callback_queue=self.queue
|
||||||
|
|
||||||
|
if callback_param != None:
|
||||||
|
self.callback_success = celery.signature(
|
||||||
|
callback_task,
|
||||||
|
[
|
||||||
|
callback_param,
|
||||||
|
0,
|
||||||
|
],
|
||||||
|
queue=callback_queue,
|
||||||
|
)
|
||||||
|
self.callback_error = celery.signature(
|
||||||
|
callback_task,
|
||||||
|
[
|
||||||
|
callback_param,
|
||||||
|
1,
|
||||||
|
],
|
||||||
|
queue=callback_queue,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
8
apps/cic-eth/cic_eth/check/db.py
Normal file
8
apps/cic-eth/cic_eth/check/db.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
|
||||||
|
|
||||||
|
def health(*args, **kwargs):
|
||||||
|
session = SessionBase.create_session()
|
||||||
|
session.execute('SELECT count(*) from alembic_version')
|
||||||
|
session.close()
|
||||||
|
return True
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user