Compare commits
bvander/fl ... lash/verif (183 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 29423449b7 | |
| | 8855ccd3d2 | |
| | 09dfdbb38a | |
| | 1abb642361 | |
| | 93bcbd7d51 | |
| | 818899670a | |
| | 1882910a8e | |
| | 3cc909c936 | |
| | 60b6e1abdb | |
| | 9c7e72f71c | |
| | e3acc1757a | |
| | 8250b15d32 | |
| | 31d7cf5789 | |
| | 2544c159c2 | |
| | 7691d9a127 | |
| | a2a3634683 | |
| | fe0835a4e7 | |
| | 7ce68021bd | |
| | d8f51c5bdd | |
| | 13fb67d2d8 | |
| | 8f1afa094d | |
| | 1d9f134125 | |
| | b6a4bab1c8 | |
| | 805fc56c7b | |
| | e3d39a2144 | |
| | 90176f2806 | |
| | f42f223ce9 | |
| | 045a279ac2 | |
| | 71a7e3d3d5 | |
| | 335b7b30a4 | |
| | 3b1f470ddf | |
| | 4c9f20aa7f | |
| | 980191be4f | |
| | 6b59c87f68 | |
| | 9ee42b908d | |
| | 3343c4163e | |
| | f5bfc8ace2 | |
| | 8a9d2ee0be | |
| | 3608fd1fc7 | |
| | 0d275f358b | |
| | 3aef2aa65f | |
| | 5644baefb2 | |
| | 1a7c4deab6 | |
| | 0389d8623d | |
| | cf64387d81 | |
| | 79bcc8a9f1 | |
| | 7b57f1b4c2 | |
| | 76b8519637 | |
| | e89aec76fa | |
| | a138a0ec75 | |
| | 5128c7828c | |
| | 2f005195e5 | |
| | fb8db3ffd2 | |
| | b5f647c4aa | |
| | 6019143ba1 | |
| | 610440b722 | |
| | d65455fc29 | |
| | 43f8d1c30c | |
| | b855211eed | |
| | 1e0c475f39 | |
| | 3e6cf594e3 | |
| | b8f79a2dd1 | |
| | 540c2fd950 | |
| | b9b06eced8 | |
| | 949bb29379 | |
| | 0468906601 | |
| | 471243488e | |
| | 3c4acd82ff | |
| | e07f992c5a | |
| | 17e95cb19c | |
| | 3c3a97ce15 | |
| | a492be4927 | |
| | 1f555748b0 | |
| | 8aa4d20eea | |
| | 90cf24dcee | |
| | 75b711dbd5 | |
| | c21c1eb2ef | |
| | eb5e612105 | |
| | e017d11770 | |
| | e327af68e1 | |
| | 92cc6a3f27 | |
| | f42bf7754a | |
| | 7342927e91 | |
| | 17333af88f | |
| | 6a68d2ed32 | |
| | ef77f4c99a | |
| | 56dbe8a502 | |
| | 2dc8ac6a12 | |
| | 0ced68e224 | |
| | 2afb20e715 | |
| | 3b0113d0e4 | |
| | ebf4743a84 | |
| | 3bf92e7a8a | |
| | f0b4c42c68 | |
| | b62d00180c | |
| | a49978cc36 | |
| | 1b0ee269d0 | |
| | aa2f363b27 | |
| | 2a24ce6938 | |
| | 938a10b5c3 | |
| | 76e33e578b | |
| | 2ec4262734 | |
| | 7684fe3883 | |
| | 995a148c6a | |
| | 511e099689 | |
| | f877218c55 | |
| | 8ac9a1e99a | |
| | c4cb095a29 | |
| | 05b8bbbbca | |
| | 1ce32fbbe0 | |
| | 3fd5e77e2c | |
| | e27a49ef33 | |
| | fffb2bc3f4 | |
| | 8910fb0759 | |
| | c84239c820 | |
| | 452047b900 | |
| | b8be457c41 | |
| | 0ec9813e5f | |
| | defa7797dc | |
| | bb3d38a1f9 | |
| | 3be1c1b33d | |
| | d6c763f2d7 | |
| | b7942ddcfa | |
| | 8de5dc1540 | |
| | fad0a4b580 | |
| | 0672a17d2e | |
| | f764b73f66 | |
| | 806b82504f | |
| | ac76e14129 | |
| | 1c78f4d6d6 | |
| | 0d6e228f8a | |
| | 7a3cb7ab75 | |
| | 992c7b4022 | |
| | f19173001e | |
| | f82bb4515d | |
| | 24e6db7d87 | |
| | ecdfb9bc5a | |
| | 30415ac997 | |
| | d5a8b77349 | |
| | ed2521b582 | |
| | 395930106a | |
| | ee1452e530 | |
| | 8cdaf9f28a | |
| | 402b968b6d | |
| | aa13517534 | |
| | 884b18f2f1 | |
| | 494a8f3e88 | |
| | 1214f605a7 | |
| | 0783a6001c | |
| | f9594b766a | |
| | 561ae62d5e | |
| | d6782abbcc | |
| | 8f173fa30b | |
| | 3741cb3283 | |
| | 54dd5acb62 | |
| | c52885a016 | |
| | f0dd257e05 | |
| | e8c870d230 | |
| | 4bb36a448d | |
| | 231163e2fc | |
| | e599933ef8 | |
| | 266bc3362d | |
| | 8350381754 | |
| | 6ddeacf036 | |
| | fe017d2b0f | |
| | d7973436e6 | |
| | 5025c31af6 | |
| | 021c736707 | |
| | e3028a5060 | |
| | 5f6c57647f | |
| | ed029a936c | |
| | c559bb2fee | |
| | 9b79034ed3 | |
| | b7d5c6799f | |
| | eef8bb2cf7 | |
| | cf96fee430 | |
| | 9740963431 | |
| | a3c4932488 | |
| | aa667951be | |
| | c2459cfd65 | |
| | e7102ff02d | |
| | a942c785f6 | |
| | 70704b09ec | |
4  .gitignore  (vendored)

@@ -12,6 +12,6 @@ build/
**/.nyc_output
**/coverage
**/.venv
**/venv
**/dist
.idea
**/.vim
**/*secret.yaml
@@ -1,14 +1,79 @@
include:
  - local: 'ci_templates/.cic-template.yml'
  - local: 'apps/contract-migration/.gitlab-ci.yml'
#- local: 'ci_templates/.cic-template.yml' #kaniko build templates
# these includes are app specific unit tests
  - local: 'apps/cic-eth/.gitlab-ci.yml'
  - local: 'apps/cic-ussd/.gitlab-ci.yml'
  - local: 'apps/cic-notify/.gitlab-ci.yml'
  - local: 'apps/cic-meta/.gitlab-ci.yml'
  - local: 'apps/cic-cache/.gitlab-ci.yml'
  - local: 'apps/data-seeding/.gitlab-ci.yml'
#- local: 'apps/contract-migration/.gitlab-ci.yml'
#- local: 'apps/data-seeding/.gitlab-ci.yml'

stages:
  - version
  - build
  - test
  - publish
  - deploy

image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/docker-with-compose:latest

variables:
  DOCKER_BUILDKIT: "1"
  COMPOSE_DOCKER_CLI_BUILD: "1"
  CI_DEBUG_TRACE: "true"
  SEMVERBOT_VERSION: "0.2.0"

#before_script:
#  - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY

version:
  #image: python:3.7-stretch
  image: registry.gitlab.com/grassrootseconomics/cic-base-images/ci-version:b01318ae
  stage: version
  script:
    - mkdir -p ~/.ssh && chmod 700 ~/.ssh
    - ssh-keyscan gitlab.com >> ~/.ssh/known_hosts && chmod 644 ~/.ssh/known_hosts
    - eval $(ssh-agent -s)
    - ssh-add <(echo "$SSH_PRIVATE_KEY")
    - git remote set-url origin git@gitlab.com:grassrootseconomics/cic-internal-integration.git
    - export TAG=$(sbot predict version -m auto)
    - |
      if [[ -z $TAG ]]
      then
        echo "tag could not be set $@"
        exit 1
      fi
    - echo $TAG > version
    - git tag -a v$TAG -m "ci tagged"
    - git push origin v$TAG
  artifacts:
    paths:
      - version
  rules:
    - if: $CI_COMMIT_REF_PROTECTED == "true"
      when: always
    - if: $CI_COMMIT_REF_NAME == "master"
      when: always

# runs on protected branches and pushes to repo
build-push:
  stage: build
  tags:
    - integration
  #script:
  #  - TAG=$CI_Cbefore_script:
  before_script:
    - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
  script:
    - TAG=latest ./scripts/build-push.sh
    - TAG=$(cat ./version) ./scripts/build-push.sh
  rules:
    - if: $CI_COMMIT_REF_PROTECTED == "true"
      when: always
    - if: $CI_COMMIT_REF_NAME == "master"
      when: always

deploy-dev:
  stage: deploy
  trigger: grassrootseconomics/devops
  when: manual
16  .semverbot.toml  (Normal file)

@@ -0,0 +1,16 @@
[git]

[git.config]
email = "semverbot@grassroots.org"
name = "semvervot"

[git.tags]
prefix = "v"

[semver]
mode = "git-commit"

[semver.detection]
patch = ["fix", "[fix]", "patch", "[patch]"]
minor = ["minor", "[minor]", "feat", "[feat]", "release", "[release]", "bump", "[bump]"]
major = ["BREAKING CHANGE"]
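For orientation, the `[semver.detection]` table above is what maps commit-message keywords to a bump level, and `mode = "git-commit"` tells semverbot to read those keywords from the commit message. Below is a minimal Python sketch of that keyword matching, written for illustration only; it is not semverbot's own code, and the `detect_bump` helper and the major-before-minor precedence are assumptions.

```python
# Illustration only: mirrors the [semver.detection] table above.
# detect_bump() is a hypothetical helper, not part of semverbot.
DETECTION = {
    'major': ['BREAKING CHANGE'],
    'minor': ['minor', '[minor]', 'feat', '[feat]', 'release', '[release]', 'bump', '[bump]'],
    'patch': ['fix', '[fix]', 'patch', '[patch]'],
}

def detect_bump(commit_message):
    # Assume the most significant matching level wins.
    for level in ('major', 'minor', 'patch'):
        if any(keyword in commit_message for keyword in DETECTION[level]):
            return level
    return None

print(detect_bump('[fix] handle empty tag'))          # patch
print(detect_bump('feat: add oldest flag to cache'))  # minor
```

Under this reading, the CI `version` job's `sbot predict version -m auto` would, for example, turn a `[bump]` commit into a minor increment on top of the latest `v`-prefixed tag.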
33  README.md

@@ -2,25 +2,22 @@

## Getting started

## Make some keys
This repo uses docker-compose and docker buildkit. Set the following environment variables to get started:

```
docker build -t bloxie . && docker run -v "$(pwd)/keys:/root/keys" --rm -it -t bloxie account new --chain /root/bloxberg.json --keys-path /root/keys
export COMPOSE_DOCKER_CLI_BUILD=1
export DOCKER_BUILDKIT=1
```

### Prepare the repo

This is stuff we need to put in makefile but for now...

File mounts and permisssions need to be set
start services, database, redis and local ethereum node
```
chmod -R 755 scripts/initdb apps/cic-meta/scripts/initdb
````

start cluster
docker-compose up -d
```
docker-compose up

Run app/contract-migration to deploy contracts
```
RUN_MASK=3 docker-compose up contract-migration
```

stop cluster
@@ -28,9 +25,9 @@ stop cluster
docker-compose down
```

delete data
stop cluster and delete data
```
docker-compose down -v
docker-compose down -v --remove-orphans
```

rebuild an images
@@ -38,5 +35,7 @@ rebuild an images
docker-compose up --build <service_name>
```

Deployment variables are writtend to service-configs/.env after everthing is up.

to delete the buildkit cache
```
docker builder prune --filter type=exec.cachemount
```
3  apps/cic-base-os/aux/wait-for-it/.gitignore  (vendored, Normal file)
@@ -0,0 +1,3 @@
|
||||
**/*.pyc
|
||||
.pydevproject
|
||||
/vendor/
|
||||
7  apps/cic-base-os/aux/wait-for-it/.travis.yml  (Normal file)
@@ -0,0 +1,7 @@
|
||||
language: python
|
||||
python:
|
||||
- "2.7"
|
||||
|
||||
script:
|
||||
- python test/wait-for-it.py
|
||||
|
||||
20  apps/cic-base-os/aux/wait-for-it/LICENSE  (Normal file)
@@ -0,0 +1,20 @@
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2016 Giles Hall
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
75  apps/cic-base-os/aux/wait-for-it/README.md  (Normal file)
@@ -0,0 +1,75 @@
|
||||
# wait-for-it
|
||||
|
||||
`wait-for-it.sh` is a pure bash script that will wait on the availability of a
|
||||
host and TCP port. It is useful for synchronizing the spin-up of
|
||||
interdependent services, such as linked docker containers. Since it is a pure
|
||||
bash script, it does not have any external dependencies.
|
||||
|
||||
## Usage
|
||||
|
||||
```text
|
||||
wait-for-it.sh host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
For example, let's test to see if we can access port 80 on `www.google.com`,
|
||||
and if it is available, echo the message `google is up`.
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:80 -- echo "google is up"
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:80
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
google is up
|
||||
```
|
||||
|
||||
You can set your own timeout with the `-t` or `--timeout=` option. Setting
|
||||
the timeout value to 0 will disable the timeout:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh -t 0 www.google.com:80 -- echo "google is up"
|
||||
wait-for-it.sh: waiting for www.google.com:80 without a timeout
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
google is up
|
||||
```
|
||||
|
||||
The subcommand will be executed regardless if the service is up or not. If you
|
||||
wish to execute the subcommand only if the service is up, add the `--strict`
|
||||
argument. In this example, we will test port 81 on `www.google.com` which will
|
||||
fail:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:81 --timeout=1 --strict -- echo "google is up"
|
||||
wait-for-it.sh: waiting 1 seconds for www.google.com:81
|
||||
wait-for-it.sh: timeout occurred after waiting 1 seconds for www.google.com:81
|
||||
wait-for-it.sh: strict mode, refusing to execute subprocess
|
||||
```
|
||||
|
||||
If you don't want to execute a subcommand, leave off the `--` argument. This
|
||||
way, you can test the exit condition of `wait-for-it.sh` in your own scripts,
|
||||
and determine how to proceed:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:80
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:80
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
$ echo $?
|
||||
0
|
||||
$ ./wait-for-it.sh www.google.com:81
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:81
|
||||
wait-for-it.sh: timeout occurred after waiting 15 seconds for www.google.com:81
|
||||
$ echo $?
|
||||
124
|
||||
```
|
||||
|
||||
## Community
|
||||
|
||||
*Debian*: There is a [Debian package](https://tracker.debian.org/pkg/wait-for-it).
|
||||
182  apps/cic-base-os/aux/wait-for-it/wait-for-it.sh  (Executable file)
@@ -0,0 +1,182 @@
|
||||
#!/usr/bin/env bash
|
||||
# Use this script to test if a given TCP host/port are available
|
||||
|
||||
WAITFORIT_cmdname=${0##*/}
|
||||
|
||||
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
|
||||
|
||||
usage()
|
||||
{
|
||||
cat << USAGE >&2
|
||||
Usage:
|
||||
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
USAGE
|
||||
exit 1
|
||||
}
|
||||
|
||||
wait_for()
|
||||
{
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
else
|
||||
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
|
||||
fi
|
||||
WAITFORIT_start_ts=$(date +%s)
|
||||
while :
|
||||
do
|
||||
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
|
||||
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
|
||||
WAITFORIT_result=$?
|
||||
else
|
||||
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
|
||||
WAITFORIT_result=$?
|
||||
fi
|
||||
if [[ $WAITFORIT_result -eq 0 ]]; then
|
||||
WAITFORIT_end_ts=$(date +%s)
|
||||
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
return $WAITFORIT_result
|
||||
}
|
||||
|
||||
wait_for_wrapper()
|
||||
{
|
||||
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
|
||||
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
else
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
fi
|
||||
WAITFORIT_PID=$!
|
||||
trap "kill -INT -$WAITFORIT_PID" INT
|
||||
wait $WAITFORIT_PID
|
||||
WAITFORIT_RESULT=$?
|
||||
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
fi
|
||||
return $WAITFORIT_RESULT
|
||||
}
|
||||
|
||||
# process arguments
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case "$1" in
|
||||
*:* )
|
||||
WAITFORIT_hostport=(${1//:/ })
|
||||
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
|
||||
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
|
||||
shift 1
|
||||
;;
|
||||
--child)
|
||||
WAITFORIT_CHILD=1
|
||||
shift 1
|
||||
;;
|
||||
-q | --quiet)
|
||||
WAITFORIT_QUIET=1
|
||||
shift 1
|
||||
;;
|
||||
-s | --strict)
|
||||
WAITFORIT_STRICT=1
|
||||
shift 1
|
||||
;;
|
||||
-h)
|
||||
WAITFORIT_HOST="$2"
|
||||
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--host=*)
|
||||
WAITFORIT_HOST="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-p)
|
||||
WAITFORIT_PORT="$2"
|
||||
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--port=*)
|
||||
WAITFORIT_PORT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-t)
|
||||
WAITFORIT_TIMEOUT="$2"
|
||||
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--timeout=*)
|
||||
WAITFORIT_TIMEOUT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
WAITFORIT_CLI=("$@")
|
||||
break
|
||||
;;
|
||||
--help)
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
echoerr "Unknown argument: $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
|
||||
echoerr "Error: you need to provide a host and port to test."
|
||||
usage
|
||||
fi
|
||||
|
||||
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
|
||||
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
|
||||
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
|
||||
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
|
||||
|
||||
# Check to see if timeout is from busybox?
|
||||
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
|
||||
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
|
||||
|
||||
WAITFORIT_BUSYTIMEFLAG=""
|
||||
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
|
||||
WAITFORIT_ISBUSY=1
|
||||
# Check if busybox timeout uses -t flag
|
||||
# (recent Alpine versions don't support -t anymore)
|
||||
if timeout &>/dev/stdout | grep -q -e '-t '; then
|
||||
WAITFORIT_BUSYTIMEFLAG="-t"
|
||||
fi
|
||||
else
|
||||
WAITFORIT_ISBUSY=0
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
exit $WAITFORIT_RESULT
|
||||
else
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
wait_for_wrapper
|
||||
WAITFORIT_RESULT=$?
|
||||
else
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CLI != "" ]]; then
|
||||
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
exec "${WAITFORIT_CLI[@]}"
|
||||
else
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
7  apps/cic-base/.gitignore  (vendored)
@@ -1,7 +0,0 @@
|
||||
gmon.out
|
||||
__pycache__
|
||||
*.pyc
|
||||
inc.sh
|
||||
*.egg-info
|
||||
build/
|
||||
dist/
|
||||
@@ -1,20 +0,0 @@
|
||||
.cic_base_variables:
|
||||
variables:
|
||||
APP_NAME: cic-base
|
||||
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
|
||||
|
||||
.cic_base_changes_target:
|
||||
rules:
|
||||
- changes:
|
||||
- $CONTEXT/$APP_NAME/*
|
||||
|
||||
build-mr-cic-base:
|
||||
extends:
|
||||
- .cic_base_changes_target
|
||||
- .py_build_merge_request
|
||||
- .cic_base_variables
|
||||
|
||||
publish_python:
|
||||
extends:
|
||||
- .publish_python
|
||||
- .cic_base_variables
|
||||
@@ -1,8 +0,0 @@
|
||||
#from . import (
|
||||
# config,
|
||||
# argparse,
|
||||
# rpc,
|
||||
# signer,
|
||||
# log,
|
||||
# version,
|
||||
# )
|
||||
@@ -1,87 +0,0 @@
|
||||
# standard imports
|
||||
import logging
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
# external imports
|
||||
from xdg.BaseDirectory import (
|
||||
xdg_config_dirs,
|
||||
load_first_config,
|
||||
)
|
||||
|
||||
logg = logging.getLogger(__file__)
|
||||
|
||||
fallback_config_path = '/usr/local/etc'
|
||||
xdg_config_dirs += [fallback_config_path]
|
||||
default_config_dir = load_first_config('cic')
|
||||
if default_config_dir == None:
|
||||
default_config_dir = os.path.join('.', '.cic')
|
||||
env_config_dir = os.environ.get('CONFINI_DIR', default_config_dir)
|
||||
|
||||
full_template = {
|
||||
# (long arg and key name, short var, type, default, help,)
|
||||
'provider': ('p', str, None, 'RPC provider url',),
|
||||
'registry_address': ('r', str, None, 'CIC registry address',),
|
||||
'keystore_file': ('y', str, None, 'Keystore file',),
|
||||
'config_dir': ('c', str, env_config_dir, 'Configuration directory',),
|
||||
'queue': ('q', str, 'cic-eth', 'Celery task queue',),
|
||||
'chain_spec': ('i', str, None, 'Chain spec string',),
|
||||
'env_prefix': (None, str, os.environ.get('CONFINI_ENV_PREFIX'), 'Environment prefix for variables to overwrite configuration',),
|
||||
}
|
||||
|
||||
default_include_args = [
|
||||
'config_dir',
|
||||
'provider',
|
||||
'env_prefix',
|
||||
]
|
||||
|
||||
sub = None
|
||||
|
||||
def create(caller_dir, include_args=default_include_args):
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
|
||||
for k in include_args:
|
||||
a = full_template[k]
|
||||
long_flag = '--' + k.replace('_', '-')
|
||||
short_flag = None
|
||||
dest = None
|
||||
if a[0] != None:
|
||||
short_flag = '-' + a[0]
|
||||
dest = a[0]
|
||||
else:
|
||||
dest = k
|
||||
default = a[2]
|
||||
if default == None and k == 'config_dir':
|
||||
default = os.path.join(os.getcwd(), 'config')
|
||||
|
||||
if short_flag == None:
|
||||
argparser.add_argument(long_flag, dest=dest, type=a[1], default=default, help=a[3])
|
||||
else:
|
||||
argparser.add_argument(short_flag, long_flag, dest=dest, type=a[1], default=default, help=a[3])
|
||||
|
||||
argparser.add_argument('-v', action='store_true', help='Be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
|
||||
|
||||
return argparser
|
||||
|
||||
|
||||
def add(argparser, processor, name, description=None):
|
||||
processor(argparser)
|
||||
|
||||
return argparser
|
||||
|
||||
|
||||
def parse(argparser, logger=None):
|
||||
|
||||
args = argparser.parse_args(sys.argv[1:])
|
||||
|
||||
# handle logging input
|
||||
if logger != None:
|
||||
if args.vv:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
return args
|
||||
@@ -1,52 +0,0 @@
|
||||
# standard imports
|
||||
import logging
|
||||
|
||||
# external imports
|
||||
import confini
|
||||
|
||||
# local imports
|
||||
from .error import ConfigError
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
default_arg_overrides = {
|
||||
'p': 'ETH_PROVIDER',
|
||||
'i': 'CIC_CHAIN_SPEC',
|
||||
'r': 'CIC_REGISTRY_ADDRESS',
|
||||
}
|
||||
|
||||
|
||||
def override(config, override_dict, label):
|
||||
config.dict_override(override_dict, label)
|
||||
config.validate()
|
||||
return config
|
||||
|
||||
|
||||
def create(config_dir, args, env_prefix=None, arg_overrides=default_arg_overrides):
|
||||
# handle config input
|
||||
config = None
|
||||
try:
|
||||
config = confini.Config(config_dir, env_prefix)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
if config == None:
|
||||
raise ConfigError('directory {} not found'.format(config_dir))
|
||||
|
||||
config.process()
|
||||
if arg_overrides != None and args != None:
|
||||
override_dict = {}
|
||||
for k in arg_overrides:
|
||||
v = getattr(args, k)
|
||||
if v != None:
|
||||
override_dict[arg_overrides[k]] = v
|
||||
config = override(config, override_dict, 'args')
|
||||
else:
|
||||
config.validate()
|
||||
|
||||
return config
|
||||
|
||||
|
||||
def log(config):
|
||||
logg.debug('config loaded:\n{}'.format(config))
|
||||
@@ -1,2 +0,0 @@
|
||||
class ConfigError(Exception):
|
||||
pass
|
||||
@@ -1,18 +0,0 @@
|
||||
# standard imports
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
|
||||
default_mutelist = [
|
||||
'urllib3',
|
||||
'websockets.protocol',
|
||||
'web3.RequestManager',
|
||||
'web3.providers.WebsocketProvider',
|
||||
'web3.providers.HTTPProvider',
|
||||
]
|
||||
|
||||
def create(name=None, mutelist=default_mutelist):
|
||||
logg = logging.getLogger(name)
|
||||
for m in mutelist:
|
||||
logging.getLogger(m).setLevel(logging.CRITICAL)
|
||||
return logg
|
||||
@@ -1,13 +0,0 @@
|
||||
# external imports
|
||||
from chainlib.connection import RPCConnection
|
||||
from chainlib.eth.connection import EthUnixSignerConnection
|
||||
from chainlib.eth.sign import (
|
||||
sign_transaction,
|
||||
sign_message,
|
||||
)
|
||||
|
||||
|
||||
def setup(chain_spec, evm_provider, signer_provider=None):
|
||||
RPCConnection.register_location(evm_provider, chain_spec, 'default')
|
||||
if signer_provider != None:
|
||||
RPCConnection.register_location(signer_provider, chain_spec, 'signer', constructor=EthUnixSignerConnection)
|
||||
@@ -1,27 +0,0 @@
|
||||
# standard imports
|
||||
import logging
|
||||
import os
|
||||
|
||||
# external imports
|
||||
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
|
||||
from crypto_dev_signer.keystore.dict import DictKeystore
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
keystore = DictKeystore()
|
||||
|
||||
default_passphrase = os.environ.get('ETH_PASSPHRASE', '')
|
||||
|
||||
|
||||
def from_keystore(keyfile, passphrase=default_passphrase):
|
||||
global keystore
|
||||
|
||||
# signer
|
||||
if keyfile == None:
|
||||
raise ValueError('please specify signer keystore file')
|
||||
|
||||
logg.debug('loading keystore file {}'.format(keyfile))
|
||||
address = keystore.import_keystore_file(keyfile, password=passphrase)
|
||||
|
||||
signer = EIP155Signer(keystore)
|
||||
return (address, signer,)
|
||||
@@ -1,122 +0,0 @@
|
||||
# stanard imports
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
from sqlalchemy import Column, Integer
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.pool import (
|
||||
StaticPool,
|
||||
QueuePool,
|
||||
AssertionPool,
|
||||
)
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
Model = declarative_base(name='Model')
|
||||
|
||||
|
||||
class SessionBase(Model):
|
||||
"""The base object for all SQLAlchemy enabled models. All other models must extend this.
|
||||
"""
|
||||
__abstract__ = True
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
|
||||
engine = None
|
||||
"""Database connection engine of the running aplication"""
|
||||
sessionmaker = None
|
||||
"""Factory object responsible for creating sessions from the connection pool"""
|
||||
transactional = True
|
||||
"""Whether the database backend supports query transactions. Should be explicitly set by initialization code"""
|
||||
poolable = True
|
||||
"""Whether the database backend supports connection pools. Should be explicitly set by initialization code"""
|
||||
procedural = True
|
||||
"""Whether the database backend supports stored procedures"""
|
||||
localsessions = {}
|
||||
"""Contains dictionary of sessions initiated by db model components"""
|
||||
|
||||
|
||||
@staticmethod
|
||||
def create_session():
|
||||
"""Creates a new database session.
|
||||
"""
|
||||
return SessionBase.sessionmaker()
|
||||
|
||||
|
||||
@staticmethod
|
||||
def _set_engine(engine):
|
||||
"""Sets the database engine static property
|
||||
"""
|
||||
SessionBase.engine = engine
|
||||
SessionBase.sessionmaker = sessionmaker(bind=SessionBase.engine)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def connect(dsn, pool_size=8, debug=False):
|
||||
"""Create new database connection engine and connect to database backend.
|
||||
|
||||
:param dsn: DSN string defining connection.
|
||||
:type dsn: str
|
||||
"""
|
||||
e = None
|
||||
if SessionBase.poolable:
|
||||
poolclass = QueuePool
|
||||
if pool_size > 1:
|
||||
e = create_engine(
|
||||
dsn,
|
||||
max_overflow=pool_size*3,
|
||||
pool_pre_ping=True,
|
||||
pool_size=pool_size,
|
||||
pool_recycle=60,
|
||||
poolclass=poolclass,
|
||||
echo=debug,
|
||||
)
|
||||
else:
|
||||
if debug:
|
||||
poolclass = AssertionPool
|
||||
else:
|
||||
poolclass = StaticPool
|
||||
|
||||
e = create_engine(
|
||||
dsn,
|
||||
poolclass=poolclass,
|
||||
echo=debug,
|
||||
)
|
||||
else:
|
||||
e = create_engine(
|
||||
dsn,
|
||||
echo=debug,
|
||||
)
|
||||
|
||||
SessionBase._set_engine(e)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def disconnect():
|
||||
"""Disconnect from database and free resources.
|
||||
"""
|
||||
SessionBase.engine.dispose()
|
||||
SessionBase.engine = None
|
||||
|
||||
|
||||
@staticmethod
|
||||
def bind_session(session=None):
|
||||
localsession = session
|
||||
if localsession == None:
|
||||
localsession = SessionBase.create_session()
|
||||
localsession_key = str(id(localsession))
|
||||
logg.debug('creating new session {}'.format(localsession_key))
|
||||
SessionBase.localsessions[localsession_key] = localsession
|
||||
return localsession
|
||||
|
||||
|
||||
@staticmethod
|
||||
def release_session(session=None):
|
||||
session.flush()
|
||||
session_key = str(id(session))
|
||||
if SessionBase.localsessions.get(session_key) != None:
|
||||
logg.debug('destroying session {}'.format(session_key))
|
||||
session.commit()
|
||||
session.close()
|
||||
@@ -1,43 +0,0 @@
|
||||
# standard imports
|
||||
import os
|
||||
import time
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
import semver
|
||||
|
||||
version = (
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
'beta.22',
|
||||
)
|
||||
|
||||
version_object = semver.VersionInfo(
|
||||
major=version[0],
|
||||
minor=version[1],
|
||||
patch=version[2],
|
||||
prerelease=version[3],
|
||||
)
|
||||
|
||||
|
||||
def git_hash():
|
||||
import subprocess
|
||||
git_diff = subprocess.run(['git', 'diff'], capture_output=True)
|
||||
git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
|
||||
git_hash_brief = git_hash.stdout.decode('utf-8')[:8]
|
||||
return git_hash_brief
|
||||
|
||||
version_string = str(version_object)
|
||||
|
||||
try:
|
||||
version_git = git_hash()
|
||||
version_string += '+build.{}'.format(version_git)
|
||||
except FileNotFoundError:
|
||||
time_string_pair = str(time.time()).split('.')
|
||||
version_string += '+build.{}{:<09d}'.format(
|
||||
time_string_pair[0],
|
||||
int(time_string_pair[1]),
|
||||
)
|
||||
|
||||
__version_string__ = version_string
|
||||
@@ -1,26 +0,0 @@
|
||||
FROM python:3.8.6-slim-buster
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y git gcc g++ libpq-dev && \
|
||||
apt-get install -y vim gawk jq telnet openssl iputils-ping curl wget gnupg socat bash procps make python2 postgresql-client cargo
|
||||
|
||||
WORKDIR /usr/src/cic-base
|
||||
COPY . /usr/src/cic-base/
|
||||
|
||||
#RUN mkdir python
|
||||
#WORKDIR ./python
|
||||
#COPY ./pep503.sh .
|
||||
#RUN pip download --no-cache-dir --extra-index-url https://pip.grassrootseconomics.net:8433 cic-base[full_graph]==0.1.1a6
|
||||
RUN pip install -r requirements.txt
|
||||
RUN python setup.py bdist_wheel
|
||||
RUN pip download --extra-index-url https://pip.grassrootseconomics.net:8433 dist/$(basename $(ls dist/*))
|
||||
RUN mkdir packages && \
|
||||
cd packages && \
|
||||
bash ../docker/pep503.sh ..
|
||||
|
||||
WORKDIR /usr/src/cic-base/packages
|
||||
|
||||
RUN ls
|
||||
RUN ls ..
|
||||
|
||||
ENTRYPOINT ["python", "-m", "http.server", "8080"]
|
||||
@@ -1,22 +0,0 @@
|
||||
#!/usr/bin/env
|
||||
|
||||
dest=`pwd`
|
||||
d=$1
|
||||
for df in `find $d -name "*.whl" -type f`; do
|
||||
f=`basename $df`
|
||||
pd=`echo $f | sed -e "s/^\(.*\)-[[:digit:]]*\.[[:digit:]].*$/\1/g" | tr "[:upper:]" "[:lower:]" | tr "_" "-"`
|
||||
mkdir -v $dest/$pd
|
||||
mv -v $df $dest/$pd/
|
||||
done
|
||||
for df in `find $d -name "*.tar.gz" -type f`; do
|
||||
f=`basename $df`
|
||||
pd=`echo $f | sed -e "s/^\(.*\)-[[:digit:]]*\.[[:digit:]].*$/\1/g" | tr "[:upper:]" "[:lower:]" | tr "_" "-"`
|
||||
mkdir -v $dest/$pd
|
||||
mv -v $df $dest/$pd/
|
||||
done
|
||||
for df in `find $d -name "*.zip" -type f`; do
|
||||
f=`basename $df`
|
||||
pd=`echo $f | sed -e "s/^\(.*\)-[[:digit:]]*\.[[:digit:]].*$/\1/g" | tr "[:upper:]" "[:lower:]" | tr "_" "-"`
|
||||
mkdir -v $dest/$pd
|
||||
mv -v $df $dest/$pd/
|
||||
done
|
||||
@@ -1,20 +0,0 @@
|
||||
import logging
|
||||
import os
|
||||
|
||||
import cic_base.argparse
|
||||
import cic_base.config
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
|
||||
def more_argparse(argparser):
|
||||
argparser.add_argument('--foo', type=str, help='foo')
|
||||
|
||||
script_dir = os.path.realpath(os.path.dirname(__file__))
|
||||
|
||||
argparser = cic_base.argparse.create(script_dir, include_args=cic_base.argparse.full_template)
|
||||
args = cic_base.argparse.parse(argparser, logger=logg)
|
||||
config = cic_base.config.create(args.c, args, env_prefix=args.env_prefix)
|
||||
|
||||
cic_base.config.log(config)
|
||||
@@ -1,3 +0,0 @@
|
||||
[foo]
|
||||
bar = 42
|
||||
baz = xyzzy
|
||||
@@ -1,98 +0,0 @@
|
||||
africastalking==1.2.3
|
||||
alembic==1.4.2
|
||||
amqp==2.6.1
|
||||
attrs==20.3.0
|
||||
base58==2.1.0
|
||||
bcrypt==3.2.0
|
||||
billiard==3.6.3.0
|
||||
bip-utils==1.4.0
|
||||
bitarray==1.2.2
|
||||
blake2b-py==0.1.4
|
||||
cached-property==1.5.2
|
||||
celery==4.4.7
|
||||
certifi==2020.12.5
|
||||
cffi==1.14.3
|
||||
chainlib==0.0.1a18
|
||||
chainsyncer==0.0.1a18
|
||||
chardet==3.0.4
|
||||
confini==0.3.6rc3
|
||||
contextlib2==0.6.0.post1
|
||||
coverage==5.4
|
||||
cryptography==3.2.1
|
||||
cytoolz==0.11.0
|
||||
ecdsa==0.16.1
|
||||
ecuth==0.4.5a1
|
||||
Faker==4.17.1
|
||||
hexathon==0.0.1a3
|
||||
hexbytes==0.2.1
|
||||
http-hoba-auth==0.2.0
|
||||
idna==2.10
|
||||
iniconfig==1.1.1
|
||||
ipfshttpclient==0.6.1
|
||||
json-rpc==1.13.0
|
||||
jsonschema==3.2.0
|
||||
kombu==4.6.11
|
||||
lru-dict==1.1.7
|
||||
Mako==1.1.3
|
||||
MarkupSafe==1.1.1
|
||||
mirakuru==2.3.0
|
||||
moolb==0.1.1b2
|
||||
more-itertools==8.7.0
|
||||
multiaddr==0.0.9
|
||||
mypy-extensions==0.4.3
|
||||
netaddr==0.8.0
|
||||
packaging==20.9
|
||||
parsimonious==0.8.1
|
||||
phonenumbers==8.12.12
|
||||
pluggy==0.13.1
|
||||
port-for==0.4
|
||||
protobuf==3.15.1
|
||||
psutil==5.8.0
|
||||
psycopg2==2.8.6
|
||||
py==1.9.0
|
||||
py-ecc==4.1.0
|
||||
py-eth==0.1.1
|
||||
pycparser==2.20
|
||||
pycryptodome==3.10.1
|
||||
pyethash==0.1.27
|
||||
pyparsing==2.4.7
|
||||
pyrsistent==0.17.3
|
||||
pysha3==1.0.2
|
||||
pytest==6.0.1
|
||||
pytest-alembic==0.2.5
|
||||
pytest-celery==0.0.0a1
|
||||
pytest-cov==2.10.1
|
||||
pytest-mock==3.3.1
|
||||
pytest-redis==2.0.0
|
||||
python-dateutil==2.8.1
|
||||
python-editor==1.0.4
|
||||
python-gnupg==0.4.6
|
||||
python-i18n==0.3.9
|
||||
pytz==2021.1
|
||||
PyYAML==5.3.1
|
||||
redis==3.5.3
|
||||
requests==2.24.0
|
||||
rlp==2.0.1
|
||||
schema==0.7.4
|
||||
semantic-version==2.8.5
|
||||
semver==2.13.0
|
||||
six==1.15.0
|
||||
sortedcontainers==2.3.0
|
||||
SQLAlchemy==1.3.20
|
||||
sqlparse==0.4.1
|
||||
text-unidecode==1.3
|
||||
tinydb==4.2.0
|
||||
toml==0.10.2
|
||||
toolz==0.11.1
|
||||
transitions==0.8.4
|
||||
trie==2.0.0a5
|
||||
typing-extensions==3.7.4.3
|
||||
urllib3==1.25.11
|
||||
uWSGI==2.0.19.1
|
||||
varint==1.0.2
|
||||
vine==1.3.0
|
||||
vobject==0.9.6.1
|
||||
web3==5.12.2
|
||||
websocket-client==0.57.0
|
||||
websockets==8.1
|
||||
yaml-acl==0.0.1
|
||||
@@ -1,44 +0,0 @@
|
||||
africastalking==1.2.3
|
||||
alembic==1.4.2
|
||||
bcrypt==3.2.0
|
||||
celery==4.4.7
|
||||
confini==0.3.6rc3
|
||||
crypto-dev-signer==0.4.14b4
|
||||
cryptography==3.2.1
|
||||
ecuth==0.4.5a5
|
||||
eth-accounts-index==0.0.11a14
|
||||
eth-address-index==0.1.1a12
|
||||
eth-contract-registry==0.5.5a3
|
||||
erc20-transfer-authorization==0.3.1a7
|
||||
erc20-faucet==0.2.1a5
|
||||
faker==4.17.1
|
||||
http-hoba-auth==0.2.1a2
|
||||
moolb==0.1.1b2
|
||||
phonenumbers==8.12.12
|
||||
psycopg2==2.8.6
|
||||
py-eth~=0.1.1
|
||||
pytest==6.0.1
|
||||
pytest-alembic==0.2.5
|
||||
pytest-celery==0.0.0a1
|
||||
pytest-cov==2.10.1
|
||||
pytest-mock==3.3.1
|
||||
pytest-redis==2.0.0
|
||||
python-i18n==0.3.9
|
||||
PyYAML==5.3.1
|
||||
redis==3.5.3
|
||||
requests==2.24.0
|
||||
semver==2.13.0
|
||||
SQLAlchemy==1.3.20
|
||||
sqlparse==0.4.1
|
||||
tinydb==4.2.0
|
||||
transitions==0.8.4
|
||||
uWSGI==2.0.19.1
|
||||
vobject==0.9.6.1
|
||||
web3==5.12.2
|
||||
websockets==8.1
|
||||
yaml-acl==0.0.1
|
||||
rlp==2.0.1
|
||||
cryptocurrency-cli-tools==0.0.5
|
||||
websocket-client==0.57.0
|
||||
hexathon==0.0.1a7
|
||||
chainsyncer~=0.0.2a5
|
||||
@@ -1,40 +0,0 @@
|
||||
confini==0.3.6rc3
|
||||
crypto-dev-signer==0.4.14b1
|
||||
semver==2.13.0
|
||||
SQLAlchemy==1.3.20
|
||||
pyxdg==0.27
|
||||
chainlib==0.0.2a10
|
||||
alembic==1.4.2
|
||||
celery==4.4.7
|
||||
cryptography==3.2.1
|
||||
ecuth==0.4.5a1
|
||||
eth-accounts-index==0.0.11a8
|
||||
eth-address-index==0.1.1a8
|
||||
eth-contract-registry==0.5.4a9
|
||||
erc20-transfer-authorization==0.3.1a4
|
||||
erc20-single-shot-faucet==0.2.0a11
|
||||
faker==4.17.1
|
||||
http-hoba-auth==0.2.0
|
||||
moolb==0.1.1b2
|
||||
phonenumbers==8.12.12
|
||||
psycopg2==2.8.6
|
||||
python-i18n==0.3.9
|
||||
PyYAML==5.3.1
|
||||
redis==3.5.3
|
||||
requests==2.24.0
|
||||
sqlparse==0.4.1
|
||||
transitions==0.8.4
|
||||
uWSGI==2.0.19.1
|
||||
vobject==0.9.6.1
|
||||
web3==5.12.2
|
||||
websockets==8.1
|
||||
yaml-acl==0.0.1
|
||||
rlp==2.0.1
|
||||
cryptocurrency-cli-tools==0.0.4
|
||||
giftable-erc20-token==0.0.8a8
|
||||
websocket-client==0.57.0
|
||||
hexathon==0.0.1a7
|
||||
chainsyncer==0.0.2b1
|
||||
sarafu-faucet==0.0.2a20
|
||||
cic-types==0.1.0a10
|
||||
cic-eth-registry==0.5.4a13
|
||||
@@ -1,9 +0,0 @@
|
||||
confini==0.3.6rc3
|
||||
crypto-dev-signer==0.4.14b4
|
||||
semver==2.13.0
|
||||
SQLAlchemy==1.3.20
|
||||
pyxdg==0.27
|
||||
chainlib==0.0.3rc3
|
||||
eth-erc20==0.0.9a4
|
||||
liveness==0.0.1a7
|
||||
requirements-magic~=0.0.1a2
|
||||
@@ -1,50 +0,0 @@
|
||||
from setuptools import setup
|
||||
import configparser
|
||||
import os
|
||||
import logging
|
||||
import re
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
re_v = r'[~><=]='
|
||||
def merge(requirements_files, base_dir='.'):
|
||||
|
||||
requirements = {}
|
||||
for r in requirements_files:
|
||||
filepath = os.path.join(base_dir, r)
|
||||
logg.debug('reading {}'.format(filepath))
|
||||
f = open(filepath, 'r')
|
||||
while True:
|
||||
l = f.readline()
|
||||
if l == '':
|
||||
break
|
||||
l = l.rstrip()
|
||||
m = re.split(re_v, l)
|
||||
k = m[0]
|
||||
if k == None:
|
||||
raise ValueError('invalid requirement line {}'.format(l))
|
||||
if requirements.get(k) == None:
|
||||
logg.info('adding {} -> {}'.format(k, l))
|
||||
requirements[k] = l
|
||||
else:
|
||||
logg.debug('skipping {}'.format(l))
|
||||
f.close()
|
||||
|
||||
return list(requirements.values())
|
||||
|
||||
|
||||
requirements = []
|
||||
f = open('requirements.txt', 'r')
|
||||
while True:
|
||||
l = f.readline()
|
||||
if l == '':
|
||||
break
|
||||
requirements.append(l.rstrip())
|
||||
f.close()
|
||||
|
||||
|
||||
|
||||
setup(
|
||||
install_requires=requirements,
|
||||
)
|
||||
@@ -1,22 +0,0 @@
|
||||
# standard imports
|
||||
import logging
|
||||
import unittest
|
||||
|
||||
# external imports
|
||||
from chainlib.chain import ChainSpec
|
||||
|
||||
# local imports
|
||||
from cic_base.rpc import setup as rpc_setup
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
logg = logging.getLogger()
|
||||
|
||||
|
||||
class TestBase(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.chain_spec = ChainSpec('evm', 'foo', 42)
|
||||
rpc_setup(self.chain_spec, 'http://localhost:8545', signer_provider='ipc://tmp/foo')
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
@@ -1,14 +0,0 @@
|
||||
# standard imports
|
||||
import unittest
|
||||
|
||||
# local imports
|
||||
from tests.base import TestBase
|
||||
|
||||
|
||||
class TestBasic(TestBase):
|
||||
|
||||
def test_basic(self):
|
||||
pass
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -2,4 +2,6 @@
omit =
    .venv/*
    scripts/*
    cic_cache/db/postgres/*
    cic_cache/db/migrations/*
    cic_cache/version.py
    cic_cache/cli
4  apps/cic-cache/.dockerignore  (Normal file)

@@ -0,0 +1,4 @@
.git
.cache
.dot
**/doc
@@ -1,22 +1,17 @@
.cic_cache_variables:
  variables:
    APP_NAME: cic-cache
    DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile

.cic_cache_changes_target:
  rules:
    - changes:
        - $CONTEXT/$APP_NAME/*

build-mr-cic-cache:
  extends:
    - .cic_cache_changes_target
    - .py_build_merge_request
    - .cic_cache_variables

build-push-cic-cache:
  extends:
    - .py_build_push
    - .cic_cache_variables

build-test-cic-cache:
  stage: test
  tags:
    - integration
  variables:
    APP_NAME: cic-cache
    MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
  script:
    - cd apps/cic-cache
    - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
    - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
  allow_failure: true
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/$APP_NAME/**/*
      when: always
1  apps/cic-cache/MANIFEST.in  (Normal file)

@@ -0,0 +1 @@
include *requirements.txt cic_cache/data/config/*

@@ -0,0 +1 @@
# CIC-CACHE
3  apps/cic-cache/aux/wait-for-it/.gitignore  (vendored, Normal file)
@@ -0,0 +1,3 @@
|
||||
**/*.pyc
|
||||
.pydevproject
|
||||
/vendor/
|
||||
7  apps/cic-cache/aux/wait-for-it/.travis.yml  (Normal file)
@@ -0,0 +1,7 @@
|
||||
language: python
|
||||
python:
|
||||
- "2.7"
|
||||
|
||||
script:
|
||||
- python test/wait-for-it.py
|
||||
|
||||
20  apps/cic-cache/aux/wait-for-it/LICENSE  (Normal file)
@@ -0,0 +1,20 @@
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2016 Giles Hall
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
75  apps/cic-cache/aux/wait-for-it/README.md  (Normal file)
@@ -0,0 +1,75 @@
|
||||
# wait-for-it
|
||||
|
||||
`wait-for-it.sh` is a pure bash script that will wait on the availability of a
|
||||
host and TCP port. It is useful for synchronizing the spin-up of
|
||||
interdependent services, such as linked docker containers. Since it is a pure
|
||||
bash script, it does not have any external dependencies.
|
||||
|
||||
## Usage
|
||||
|
||||
```text
|
||||
wait-for-it.sh host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
For example, let's test to see if we can access port 80 on `www.google.com`,
|
||||
and if it is available, echo the message `google is up`.
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:80 -- echo "google is up"
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:80
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
google is up
|
||||
```
|
||||
|
||||
You can set your own timeout with the `-t` or `--timeout=` option. Setting
|
||||
the timeout value to 0 will disable the timeout:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh -t 0 www.google.com:80 -- echo "google is up"
|
||||
wait-for-it.sh: waiting for www.google.com:80 without a timeout
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
google is up
|
||||
```
|
||||
|
||||
The subcommand will be executed regardless if the service is up or not. If you
|
||||
wish to execute the subcommand only if the service is up, add the `--strict`
|
||||
argument. In this example, we will test port 81 on `www.google.com` which will
|
||||
fail:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:81 --timeout=1 --strict -- echo "google is up"
|
||||
wait-for-it.sh: waiting 1 seconds for www.google.com:81
|
||||
wait-for-it.sh: timeout occurred after waiting 1 seconds for www.google.com:81
|
||||
wait-for-it.sh: strict mode, refusing to execute subprocess
|
||||
```
|
||||
|
||||
If you don't want to execute a subcommand, leave off the `--` argument. This
|
||||
way, you can test the exit condition of `wait-for-it.sh` in your own scripts,
|
||||
and determine how to proceed:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:80
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:80
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
$ echo $?
|
||||
0
|
||||
$ ./wait-for-it.sh www.google.com:81
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:81
|
||||
wait-for-it.sh: timeout occurred after waiting 15 seconds for www.google.com:81
|
||||
$ echo $?
|
||||
124
|
||||
```
|
||||
|
||||
## Community
|
||||
|
||||
*Debian*: There is a [Debian package](https://tracker.debian.org/pkg/wait-for-it).
|
||||
182  apps/cic-cache/aux/wait-for-it/wait-for-it.sh  (Executable file)
@@ -0,0 +1,182 @@
|
||||
#!/usr/bin/env bash
|
||||
# Use this script to test if a given TCP host/port are available
|
||||
|
||||
WAITFORIT_cmdname=${0##*/}
|
||||
|
||||
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
|
||||
|
||||
usage()
|
||||
{
|
||||
cat << USAGE >&2
|
||||
Usage:
|
||||
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
USAGE
|
||||
exit 1
|
||||
}
|
||||
|
||||
wait_for()
|
||||
{
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
else
|
||||
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
|
||||
fi
|
||||
WAITFORIT_start_ts=$(date +%s)
|
||||
while :
|
||||
do
|
||||
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
|
||||
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
|
||||
WAITFORIT_result=$?
|
||||
else
|
||||
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
|
||||
WAITFORIT_result=$?
|
||||
fi
|
||||
if [[ $WAITFORIT_result -eq 0 ]]; then
|
||||
WAITFORIT_end_ts=$(date +%s)
|
||||
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
return $WAITFORIT_result
|
||||
}
|
||||
|
||||
wait_for_wrapper()
|
||||
{
|
||||
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
|
||||
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
else
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
fi
|
||||
WAITFORIT_PID=$!
|
||||
trap "kill -INT -$WAITFORIT_PID" INT
|
||||
wait $WAITFORIT_PID
|
||||
WAITFORIT_RESULT=$?
|
||||
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
fi
|
||||
return $WAITFORIT_RESULT
|
||||
}
|
||||
|
||||
# process arguments
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case "$1" in
|
||||
*:* )
|
||||
WAITFORIT_hostport=(${1//:/ })
|
||||
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
|
||||
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
|
||||
shift 1
|
||||
;;
|
||||
--child)
|
||||
WAITFORIT_CHILD=1
|
||||
shift 1
|
||||
;;
|
||||
-q | --quiet)
|
||||
WAITFORIT_QUIET=1
|
||||
shift 1
|
||||
;;
|
||||
-s | --strict)
|
||||
WAITFORIT_STRICT=1
|
||||
shift 1
|
||||
;;
|
||||
-h)
|
||||
WAITFORIT_HOST="$2"
|
||||
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--host=*)
|
||||
WAITFORIT_HOST="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-p)
|
||||
WAITFORIT_PORT="$2"
|
||||
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--port=*)
|
||||
WAITFORIT_PORT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-t)
|
||||
WAITFORIT_TIMEOUT="$2"
|
||||
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--timeout=*)
|
||||
WAITFORIT_TIMEOUT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
WAITFORIT_CLI=("$@")
|
||||
break
|
||||
;;
|
||||
--help)
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
echoerr "Unknown argument: $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
|
||||
echoerr "Error: you need to provide a host and port to test."
|
||||
usage
|
||||
fi
|
||||
|
||||
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
|
||||
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
|
||||
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
|
||||
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
|
||||
|
||||
# Check to see if timeout is from busybox?
|
||||
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
|
||||
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
|
||||
|
||||
WAITFORIT_BUSYTIMEFLAG=""
|
||||
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
|
||||
WAITFORIT_ISBUSY=1
|
||||
# Check if busybox timeout uses -t flag
|
||||
# (recent Alpine versions don't support -t anymore)
|
||||
if timeout &>/dev/stdout | grep -q -e '-t '; then
|
||||
WAITFORIT_BUSYTIMEFLAG="-t"
|
||||
fi
|
||||
else
|
||||
WAITFORIT_ISBUSY=0
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
exit $WAITFORIT_RESULT
|
||||
else
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
wait_for_wrapper
|
||||
WAITFORIT_RESULT=$?
|
||||
else
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CLI != "" ]]; then
|
||||
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
exec "${WAITFORIT_CLI[@]}"
|
||||
else
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
@@ -55,15 +55,37 @@ class Api:
            queue=callback_queue,
            )

    def list(self, offset, limit, address=None):
    def list(self, offset=0, limit=100, address=None, oldest=False):
        s = celery.signature(
                'cic_cache.tasks.tx.tx_filter',
                [
                    0,
                    100,
                    offset,
                    limit,
                    address,
                    oldest,
                    ],
                queue=None
                queue=self.queue,
                )
        if self.callback_param != None:
            s.link(self.callback_success).on_error(self.callback_error)

        t = s.apply_async()

        return t


    def list_content(self, offset=0, limit=100, address=None, block_offset=None, block_limit=None, oldest=False):
        s = celery.signature(
                'cic_cache.tasks.tx.tx_filter_content',
                [
                    offset,
                    limit,
                    address,
                    block_offset,
                    block_limit,
                    oldest,
                    ],
                queue=self.queue,
                )
        if self.callback_param != None:
            s.link(self.callback_success).on_error(self.callback_error)
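For reference, a hedged sketch of how a client might call the widened `Api.list` above. Only the `offset`, `limit`, `address` and `oldest` parameters and the celery `apply_async` flow are taken from the diff itself; the constructor keyword arguments, broker URL, and address value are placeholders assumed for illustration.

```python
# Sketch only: the Api constructor arguments and broker wiring shown here
# are assumptions, not confirmed by this diff.
import celery
from cic_cache.api import Api

celery_app = celery.Celery(broker='redis://localhost:6379', backend='redis://localhost:6379')

api = Api(queue='cic-cache')

# Newest-first page of up to 100 transactions involving one address.
t = api.list(offset=0, limit=100, address='0x...', oldest=False)
result = t.get()   # AsyncResult from s.apply_async(); blocks until tx_filter completes
```

When `oldest=True`, the same call pages from the earliest cached block instead, matching the `oldest` argument that `tx_filter` now receives.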
@@ -10,11 +10,17 @@ from cic_cache.db.list import (
|
||||
list_transactions_mined,
|
||||
list_transactions_account_mined,
|
||||
list_transactions_mined_with_data,
|
||||
list_transactions_mined_with_data_index,
|
||||
list_transactions_account_mined_with_data_index,
|
||||
list_transactions_account_mined_with_data,
|
||||
)
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
|
||||
DEFAULT_FILTER_SIZE = 8192 * 8
|
||||
DEFAULT_LIMIT = 100
|
||||
|
||||
class Cache:
|
||||
|
||||
def __init__(self, session):
|
||||
@@ -25,12 +31,12 @@ class BloomCache(Cache):
|
||||
|
||||
@staticmethod
|
||||
def __get_filter_size(n):
|
||||
n = 8192 * 8
|
||||
n = DEFAULT_FILTER_SIZE
|
||||
logg.warning('filter size hardcoded to {}'.format(n))
|
||||
return n
|
||||
|
||||
|
||||
def load_transactions(self, offset, limit):
|
||||
def load_transactions(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
|
||||
"""Retrieves a list of transactions from cache and creates a bloom filter pointing to blocks and transactions.
|
||||
|
||||
Block and transaction numbers are serialized as 32-bit big-endian numbers. The input to the second bloom filter is the concatenation of the serialized block number and transaction index.
|
||||
@@ -47,7 +53,7 @@ class BloomCache(Cache):
|
||||
:return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
|
||||
:rtype: tuple
|
||||
"""
|
||||
rows = list_transactions_mined(self.session, offset, limit)
|
||||
rows = list_transactions_mined(self.session, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
|
||||
|
||||
f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
||||
f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
||||
@@ -56,7 +62,12 @@ class BloomCache(Cache):
|
||||
for r in rows:
|
||||
if highest_block == -1:
|
||||
highest_block = r[0]
|
||||
lowest_block = r[0]
|
||||
lowest_block = r[0]
|
||||
else:
|
||||
if oldest:
|
||||
highest_block = r[0]
|
||||
else:
|
||||
lowest_block = r[0]
|
||||
block = r[0].to_bytes(4, byteorder='big')
|
||||
tx = r[1].to_bytes(4, byteorder='big')
|
||||
f_block.add(block)
|
||||
@@ -65,7 +76,7 @@ class BloomCache(Cache):
|
||||
return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
|
||||
|
||||
|
||||
def load_transactions_account(self, address, offset, limit):
|
||||
def load_transactions_account(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
|
||||
"""Same as load_transactions(...), but only retrieves transactions where the specified account address is sender or recipient.
|
||||
|
||||
:param address: Address to retrieve transactions for.
|
||||
@@ -77,7 +88,7 @@ class BloomCache(Cache):
|
||||
:return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
|
||||
:rtype: tuple
|
||||
"""
|
||||
rows = list_transactions_account_mined(self.session, address, offset, limit)
|
||||
rows = list_transactions_account_mined(self.session, address, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
|
||||
|
||||
f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
||||
f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
|
||||
@@ -86,7 +97,12 @@ class BloomCache(Cache):
|
||||
for r in rows:
|
||||
if highest_block == -1:
|
||||
highest_block = r[0]
|
||||
lowest_block = r[0]
|
||||
lowest_block = r[0]
|
||||
else:
|
||||
if oldest:
|
||||
highest_block = r[0]
|
||||
else:
|
||||
lowest_block = r[0]
|
||||
block = r[0].to_bytes(4, byteorder='big')
|
||||
tx = r[1].to_bytes(4, byteorder='big')
|
||||
f_block.add(block)
|
||||
@@ -97,8 +113,21 @@ class BloomCache(Cache):
|
||||
|
||||
class DataCache(Cache):
|
||||
|
||||
def load_transactions_with_data(self, offset, end):
|
||||
rows = list_transactions_mined_with_data(self.session, offset, end)
|
||||
def load_transactions_with_data(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
|
||||
if limit == 0:
|
||||
limit = DEFAULT_LIMIT
|
||||
rows = list_transactions_mined_with_data(self.session, offset, limit, block_offset, block_limit, oldest=oldest)
|
||||
return self.__process_rows(rows, oldest)
|
||||
|
||||
|
||||
def load_transactions_account_with_data(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
|
||||
if limit == 0:
|
||||
limit = DEFAULT_LIMIT
|
||||
rows = list_transactions_account_mined_with_data(self.session, address, offset, limit, block_offset, block_limit, oldest=oldest)
|
||||
return self.__process_rows(rows, oldest)
|
||||
|
||||
|
||||
def __process_rows(self, rows, oldest):
|
||||
tx_cache = []
|
||||
highest_block = -1;
|
||||
lowest_block = -1;
|
||||
@@ -106,7 +135,12 @@ class DataCache(Cache):
|
||||
for r in rows:
|
||||
if highest_block == -1:
|
||||
highest_block = r['block_number']
|
||||
lowest_block = r['block_number']
|
||||
lowest_block = r['block_number']
|
||||
else:
|
||||
if oldest:
|
||||
highest_block = r['block_number']
|
||||
else:
|
||||
lowest_block = r['block_number']
|
||||
tx_type = 'unknown'
|
||||
|
||||
if r['value'] != None:
|
||||
|
||||
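The oldest flag changes which bound follows the row cursor in the loops above: rows arrive in ascending block order when oldest is set and in descending order otherwise, so the first row seeds both bounds and only one of them is updated afterwards. A self-contained illustration of that logic (not repository code):

# standalone sketch of the lowest/highest tracking used in the loops above
def track_bounds(block_numbers, oldest=False):
    highest_block = -1
    lowest_block = -1
    for n in block_numbers:
        if highest_block == -1:
            highest_block = n
            lowest_block = n
        elif oldest:
            highest_block = n   # ascending result set: the last row is the highest
        else:
            lowest_block = n    # descending result set: the last row is the lowest
    return (lowest_block, highest_block)

assert track_bounds([10, 9, 8]) == (8, 10)
assert track_bounds([8, 9, 10], oldest=True) == (8, 10)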
15
apps/cic-cache/cic_cache/cli/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
||||
# local imports
|
||||
from .base import *
|
||||
from .chain import (
|
||||
EthChainInterface,
|
||||
chain_interface,
|
||||
)
|
||||
from .rpc import RPC
|
||||
from .arg import ArgumentParser
|
||||
from .config import Config
|
||||
from .celery import CeleryApp
|
||||
from .registry import (
|
||||
connect_registry,
|
||||
connect_token_registry,
|
||||
connect_declarator,
|
||||
)
|
||||
20
apps/cic-cache/cic_cache/cli/arg.py
Normal file
@@ -0,0 +1,20 @@
|
||||
# external imports
|
||||
from chainlib.eth.cli import ArgumentParser as BaseArgumentParser
|
||||
|
||||
# local imports
|
||||
from .base import (
|
||||
CICFlag,
|
||||
Flag,
|
||||
)
|
||||
|
||||
|
||||
class ArgumentParser(BaseArgumentParser):
|
||||
|
||||
def process_local_flags(self, local_arg_flags):
|
||||
if local_arg_flags & CICFlag.CELERY:
|
||||
self.add_argument('-q', '--celery-queue', dest='celery_queue', type=str, default='cic-cache', help='Task queue')
|
||||
if local_arg_flags & CICFlag.SYNCER:
|
||||
self.add_argument('--offset', type=int, default=0, help='Start block height for initial history sync')
|
||||
self.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
|
||||
if local_arg_flags & CICFlag.CHAIN:
|
||||
self.add_argument('-r', '--registry-address', type=str, dest='registry_address', help='CIC registry contract address')
|
||||
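Each block of arguments is only registered when the corresponding CICFlag bit is passed in, so every daemon opts in per feature. A short sketch of exercising the local flags (it mirrors tests/cli/test_cli_args.py further down in this diff; the registry address is a placeholder, and the composite flag masks come from base.py just below):

# sketch: parsing the locally added flags
import cic_cache.cli

argparser = cic_cache.cli.ArgumentParser()
local_arg_flags = cic_cache.cli.argflag_local_sync | cic_cache.cli.argflag_local_task
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args(['-q', 'cic-cache', '--offset', '42', '--no-history', '-r', '0xdeadbeef'])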
31
apps/cic-cache/cic_cache/cli/base.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# standard imports
|
||||
import enum
|
||||
|
||||
# external imports
|
||||
from chainlib.eth.cli import (
|
||||
argflag_std_read,
|
||||
argflag_std_write,
|
||||
argflag_std_base,
|
||||
Flag,
|
||||
)
|
||||
|
||||
class CICFlag(enum.IntEnum):
|
||||
|
||||
# celery - nibble 1
|
||||
CELERY = 1
|
||||
|
||||
# redis - nibble 2
|
||||
# REDIS = 16
|
||||
# REDIS_CALLBACK = 32
|
||||
|
||||
# chain - nibble 3
|
||||
CHAIN = 256
|
||||
|
||||
# sync - nibble 4
|
||||
SYNCER = 4096
|
||||
|
||||
|
||||
argflag_local_task = CICFlag.CELERY
|
||||
#argflag_local_taskcallback = argflag_local_task | CICFlag.REDIS | CICFlag.REDIS_CALLBACK
|
||||
argflag_local_chain = CICFlag.CHAIN
|
||||
argflag_local_sync = CICFlag.SYNCER | CICFlag.CHAIN
|
||||
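The flag values are spaced one nibble apart so related local flags can be grouped and OR'ed into the composite masks at the bottom of the file, with nibble 2 reserved for the commented-out redis flags. For instance (names as defined above):

# illustration of the nibble spacing defined above
assert int(CICFlag.CELERY) == 0x1      # nibble 1
assert int(CICFlag.CHAIN) == 0x100     # nibble 3
assert int(CICFlag.SYNCER) == 0x1000   # nibble 4
assert int(argflag_local_sync) == 0x1100   # SYNCER | CHAIN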
24
apps/cic-cache/cic_cache/cli/celery.py
Normal file
@@ -0,0 +1,24 @@
|
||||
# standard imports
|
||||
import logging
|
||||
|
||||
# external imports
|
||||
import celery
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CeleryApp:
|
||||
|
||||
@classmethod
|
||||
def from_config(cls, config):
|
||||
backend_url = config.get('CELERY_RESULT_URL')
|
||||
broker_url = config.get('CELERY_BROKER_URL')
|
||||
celery_app = None
|
||||
if backend_url != None:
|
||||
celery_app = celery.Celery(broker=broker_url, backend=backend_url)
|
||||
logg.info('creating celery app on {} with backend on {}'.format(broker_url, backend_url))
|
||||
else:
|
||||
celery_app = celery.Celery(broker=broker_url)
|
||||
logg.info('creating celery app without results backend on {}'.format(broker_url))
|
||||
|
||||
return celery_app
|
||||
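A result backend is only configured when CELERY_RESULT_URL is set; otherwise the app is broker-only. A usage sketch mirroring tests/cli/test_cli_celery.py later in this diff (the filesystem transport is just a convenient local stand-in):

# sketch: building the celery app from a plain config mapping
import tempfile
import cic_cache.cli

cf = tempfile.mkdtemp()
config = {
    'CELERY_BROKER_URL': 'filesystem://' + cf,
    'CELERY_RESULT_URL': 'filesystem://' + cf,
}
celery_app = cic_cache.cli.CeleryApp.from_config(config)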
21
apps/cic-cache/cic_cache/cli/chain.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# external imports
|
||||
from chainlib.eth.block import (
|
||||
block_by_number,
|
||||
Block,
|
||||
)
|
||||
from chainlib.eth.tx import (
|
||||
receipt,
|
||||
Tx,
|
||||
)
|
||||
from chainlib.interface import ChainInterface
|
||||
|
||||
|
||||
class EthChainInterface(ChainInterface):
|
||||
|
||||
def __init__(self):
|
||||
self._tx_receipt = receipt
|
||||
self._block_by_number = block_by_number
|
||||
self._block_from_src = Block.from_src
|
||||
self._src_normalize = Tx.src_normalize
|
||||
|
||||
chain_interface = EthChainInterface()
|
||||
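The interface maps chainsyncer's generic callbacks onto their EVM implementations from chainlib. A fragment showing how a consumer would resolve a block through it (mirrors tests/cli/test_cli_chain.py below; conn stands for an already registered chainlib RPC connection, which is an assumption here):

# sketch: resolving a block through the interface; 'conn' is an assumed open RPC connection
ifc = EthChainInterface()
o = ifc.block_by_number(1)            # build the JSON-RPC request
block_src = conn.do(o)                # execute it against the node
block = ifc.block_from_src(block_src) # normalize into a chainlib Block object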
63
apps/cic-cache/cic_cache/cli/config.py
Normal file
@@ -0,0 +1,63 @@
|
||||
# standard imports
|
||||
import os
|
||||
import logging
|
||||
|
||||
# external imports
|
||||
from chainlib.eth.cli import (
|
||||
Config as BaseConfig,
|
||||
Flag,
|
||||
)
|
||||
|
||||
# local imports
|
||||
from .base import CICFlag
|
||||
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Config(BaseConfig):
|
||||
|
||||
local_base_config_dir = os.path.join(script_dir, '..', 'data', 'config')
|
||||
|
||||
@classmethod
|
||||
def from_args(cls, args, arg_flags, local_arg_flags, extra_args={}, default_config_dir=None, base_config_dir=None, default_fee_limit=None):
|
||||
expanded_base_config_dir = [cls.local_base_config_dir]
|
||||
if base_config_dir != None:
|
||||
if isinstance(base_config_dir, str):
|
||||
base_config_dir = [base_config_dir]
|
||||
for d in base_config_dir:
|
||||
expanded_base_config_dir.append(d)
|
||||
config = BaseConfig.from_args(args, arg_flags, extra_args=extra_args, default_config_dir=default_config_dir, base_config_dir=expanded_base_config_dir, load_callback=None)
|
||||
|
||||
local_args_override = {}
|
||||
# if local_arg_flags & CICFlag.REDIS:
|
||||
# local_args_override['REDIS_HOST'] = getattr(args, 'redis_host')
|
||||
# local_args_override['REDIS_PORT'] = getattr(args, 'redis_port')
|
||||
# local_args_override['REDIS_DB'] = getattr(args, 'redis_db')
|
||||
# local_args_override['REDIS_TIMEOUT'] = getattr(args, 'redis_timeout')
|
||||
|
||||
if local_arg_flags & CICFlag.CHAIN:
|
||||
local_args_override['CIC_REGISTRY_ADDRESS'] = getattr(args, 'registry_address')
|
||||
|
||||
if local_arg_flags & CICFlag.CELERY:
|
||||
local_args_override['CELERY_QUEUE'] = getattr(args, 'celery_queue')
|
||||
|
||||
if local_arg_flags & CICFlag.SYNCER:
|
||||
local_args_override['SYNCER_OFFSET'] = getattr(args, 'offset')
|
||||
local_args_override['SYNCER_NO_HISTORY'] = getattr(args, 'no_history')
|
||||
|
||||
config.dict_override(local_args_override, 'local cli args')
|
||||
|
||||
# if local_arg_flags & CICFlag.REDIS_CALLBACK:
|
||||
# config.add(getattr(args, 'redis_host_callback'), '_REDIS_HOST_CALLBACK')
|
||||
# config.add(getattr(args, 'redis_port_callback'), '_REDIS_PORT_CALLBACK')
|
||||
|
||||
if local_arg_flags & CICFlag.CELERY:
|
||||
config.add(config.true('CELERY_DEBUG'), 'CELERY_DEBUG', exists_ok=True)
|
||||
|
||||
logg.debug('config loaded:\n{}'.format(config))
|
||||
|
||||
return config
|
||||
|
||||
|
||||
33
apps/cic-cache/cic_cache/cli/registry.py
Normal file
@@ -0,0 +1,33 @@
|
||||
# standard imports
|
||||
import logging
|
||||
|
||||
# external imports
|
||||
from cic_eth_registry import CICRegistry
|
||||
from cic_eth_registry.lookup.declarator import AddressDeclaratorLookup
|
||||
from cic_eth_registry.lookup.tokenindex import TokenIndexLookup
|
||||
from chainlib.eth.constant import ZERO_ADDRESS
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
|
||||
def connect_token_registry(self, conn, chain_spec, sender_address=ZERO_ADDRESS):
|
||||
registry = CICRegistry(chain_spec, conn)
|
||||
token_registry_address = registry.by_name('TokenRegistry', sender_address=sender_address)
|
||||
logg.debug('using token registry address {}'.format(token_registry_address))
|
||||
lookup = TokenIndexLookup(chain_spec, token_registry_address)
|
||||
CICRegistry.add_lookup(lookup)
|
||||
|
||||
|
||||
def connect_declarator(self, conn, chain_spec, trusted_addresses, sender_address=ZERO_ADDRESS):
|
||||
registry = CICRegistry(chain_spec, conn)
|
||||
declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address)
|
||||
logg.debug('using declarator address {}'.format(declarator_address))
|
||||
lookup = AddressDeclaratorLookup(chain_spec, declarator_address, trusted_addresses)
|
||||
CICRegistry.add_lookup(lookup)
|
||||
|
||||
|
||||
def connect_registry(conn, chain_spec, registry_address, sender_address=ZERO_ADDRESS):
|
||||
CICRegistry.address = registry_address
|
||||
registry = CICRegistry(chain_spec, conn)
|
||||
registry_address = registry.by_name('ContractRegistry', sender_address=sender_address)
|
||||
return registry
|
||||
43
apps/cic-cache/cic_cache/cli/rpc.py
Normal file
@@ -0,0 +1,43 @@
|
||||
# standard imports
|
||||
import logging
|
||||
|
||||
# external imports
|
||||
from chainlib.connection import (
|
||||
RPCConnection,
|
||||
ConnType,
|
||||
)
|
||||
from chainlib.eth.connection import EthUnixSignerConnection
|
||||
from chainlib.chain import ChainSpec
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RPC:
|
||||
|
||||
def __init__(self, chain_spec, rpc_provider, signer_provider=None):
|
||||
self.chain_spec = chain_spec
|
||||
self.rpc_provider = rpc_provider
|
||||
self.signer_provider = signer_provider
|
||||
|
||||
|
||||
def get_default(self):
|
||||
return RPCConnection.connect(self.chain_spec, 'default')
|
||||
|
||||
|
||||
@staticmethod
|
||||
def from_config(config):
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
|
||||
RPCConnection.register_location(config.get('RPC_PROVIDER'), chain_spec, 'default')
|
||||
if config.get('SIGNER_PROVIDER'):
|
||||
RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, tag='signer')
|
||||
RPCConnection.register_location(config.get('SIGNER_PROVIDER'), chain_spec, 'signer')
|
||||
rpc = RPC(chain_spec, config.get('RPC_PROVIDER'), signer_provider=config.get('SIGNER_PROVIDER'))
|
||||
logg.info('set up rpc: {}'.format(rpc))
|
||||
return rpc
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return 'RPC factory, chain {}, rpc {}, signer {}'.format(self.chain_spec, self.rpc_provider, self.signer_provider)
|
||||
|
||||
|
||||
|
||||
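RPC.from_config registers the default connection location (and optionally a unix-socket signer) before any daemon code asks for a connection. A sketch with placeholder values; the chain spec and provider URL are assumptions, not taken from this diff:

# sketch: placeholder chain spec and provider, signer left unset
import cic_cache.cli

config = {
    'CHAIN_SPEC': 'evm:bloxberg:8996',
    'RPC_PROVIDER': 'http://localhost:8545',
    'SIGNER_PROVIDER': None,
}
rpc = cic_cache.cli.RPC.from_config(config)
conn = rpc.get_default()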
5
apps/cic-cache/cic_cache/data/config/celery.ini
Normal file
@@ -0,0 +1,5 @@
[celery]
broker_url = redis://localhost:6379
result_url =
queue = cic-cache
debug = 0
4
apps/cic-cache/cic_cache/data/config/cic.ini
Normal file
@@ -0,0 +1,4 @@
[cic]
registry_address =
trust_address =
health_modules =
10
apps/cic-cache/cic_cache/data/config/database.ini
Normal file
@@ -0,0 +1,10 @@
[database]
engine =
driver =
host =
port =
name = cic-cache
user =
password =
debug = 0
pool_size = 0
2
apps/cic-cache/cic_cache/data/config/signer.ini
Normal file
@@ -0,0 +1,2 @@
[signer]
provider =
@@ -1,3 +1,4 @@
[syncer]
loop_interval = 1
history_start = 0
offset = 0
no_history = 0
@@ -13,6 +13,9 @@ def list_transactions_mined(
|
||||
session,
|
||||
offset,
|
||||
limit,
|
||||
block_offset,
|
||||
block_limit,
|
||||
oldest=False,
|
||||
):
|
||||
"""Executes db query to return all confirmed transactions according to the specified offset and limit.
|
||||
|
||||
@@ -23,15 +26,62 @@ def list_transactions_mined(
|
||||
:result: Result set
|
||||
:rtype: SQLAlchemy.ResultProxy
|
||||
"""
|
||||
s = "SELECT block_number, tx_index FROM tx ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(limit, offset)
|
||||
order_by = 'DESC'
|
||||
if oldest:
|
||||
order_by = 'ASC'
|
||||
|
||||
if block_offset:
|
||||
if block_limit:
|
||||
s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} and block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, limit, offset)
|
||||
else:
|
||||
s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, limit, offset)
|
||||
else:
|
||||
s = "SELECT block_number, tx_index FROM tx ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, limit, offset)
|
||||
r = session.execute(s)
|
||||
return r
|
||||
|
||||
|
||||
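The query text is assembled with str.format, so the block range narrows the WHERE clause and oldest flips the sort direction. For example, list_transactions_mined(session, 0, 10, 100, 200, oldest=True) renders to the following statement (illustration only):

# the statement produced by the branch above for offset=0, limit=10,
# block_offset=100, block_limit=200, oldest=True
s = ("SELECT block_number, tx_index FROM tx "
     "WHERE block_number >= 100 and block_number <= 200 "
     "ORDER BY block_number ASC, tx_index ASC LIMIT 10 OFFSET 0")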
def list_transactions_mined_with_data(
|
||||
session,
|
||||
offset,
|
||||
limit,
|
||||
block_offset,
|
||||
block_limit,
|
||||
oldest=False,
|
||||
):
|
||||
"""Executes db query to return all confirmed transactions according to the specified offset and limit.
|
||||
|
||||
:param block_offset: First block to include in search
|
||||
:type block_offset: int
|
||||
:param block_limit: Last block to include in search
|
||||
:type block_limit: int
|
||||
:result: Result set
|
||||
:rtype: SQLAlchemy.ResultProxy
|
||||
"""
|
||||
order_by = 'DESC'
|
||||
if oldest:
|
||||
order_by = 'ASC'
|
||||
|
||||
if block_offset:
|
||||
if block_limit:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, limit, offset)
|
||||
else:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, limit, offset)
|
||||
else:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, limit, offset)
|
||||
|
||||
|
||||
r = session.execute(s)
|
||||
return r
|
||||
|
||||
|
||||
def list_transactions_mined_with_data_index(
|
||||
session,
|
||||
offset,
|
||||
end,
|
||||
block_offset,
|
||||
block_limit,
|
||||
oldest=False,
|
||||
):
|
||||
"""Executes db query to return all confirmed transactions according to the specified offset and limit.
|
||||
|
||||
@@ -42,7 +92,87 @@ def list_transactions_mined_with_data(
|
||||
:result: Result set
|
||||
:rtype: SQLAlchemy.ResultProxy
|
||||
"""
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number ASC, tx_index ASC".format(offset, end)
|
||||
|
||||
order_by = 'DESC'
|
||||
if oldest:
|
||||
order_by = 'ASC'
|
||||
|
||||
if block_offset:
|
||||
if block_limit:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} and block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, offset, end)
|
||||
else:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, offset, end)
|
||||
else:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, offset, end)
|
||||
|
||||
r = session.execute(s)
|
||||
return r
|
||||
|
||||
|
||||
def list_transactions_account_mined_with_data_index(
|
||||
session,
|
||||
address,
|
||||
offset,
|
||||
limit,
|
||||
block_offset,
|
||||
block_limit,
|
||||
oldest=False,
|
||||
):
|
||||
"""Executes db query to return all confirmed transactions according to the specified offset and limit, filtered by address
|
||||
|
||||
:param offset: Offset in data set to return transactions from
|
||||
:type offset: int
|
||||
:param limit: Max number of transactions to retrieve
|
||||
:type limit: int
|
||||
:result: Result set
|
||||
:rtype: SQLAlchemy.ResultProxy
|
||||
"""
|
||||
|
||||
order_by = 'DESC'
|
||||
if oldest:
|
||||
order_by = 'ASC'
|
||||
|
||||
if block_offset:
|
||||
if block_limit:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
|
||||
else:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
|
||||
else:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)
|
||||
|
||||
r = session.execute(s)
|
||||
return r
|
||||
|
||||
def list_transactions_account_mined_with_data(
|
||||
session,
|
||||
address,
|
||||
offset,
|
||||
limit,
|
||||
block_offset,
|
||||
block_limit,
|
||||
oldest=False,
|
||||
):
|
||||
"""Executes db query to return all confirmed transactions according to the specified offset and limit.
|
||||
|
||||
:param block_offset: First block to include in search
|
||||
:type block_offset: int
|
||||
:param block_limit: Last block to include in search
|
||||
:type block_limit: int
|
||||
:result: Result set
|
||||
:rtype: SQLAlchemy.ResultProxy
|
||||
"""
|
||||
|
||||
order_by = 'DESC'
|
||||
if oldest:
|
||||
order_by = 'ASC'
|
||||
|
||||
if block_offset:
|
||||
if block_limit:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
|
||||
else:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
|
||||
else:
|
||||
s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)
|
||||
|
||||
r = session.execute(s)
|
||||
return r
|
||||
@@ -53,6 +183,9 @@ def list_transactions_account_mined(
|
||||
address,
|
||||
offset,
|
||||
limit,
|
||||
block_offset,
|
||||
block_limit,
|
||||
oldest=False,
|
||||
):
|
||||
"""Same as list_transactions_mined(...), but only retrieves transaction where the specified account address is sender or recipient.
|
||||
|
||||
@@ -65,7 +198,20 @@ def list_transactions_account_mined(
|
||||
:result: Result set
|
||||
:rtype: SQLAlchemy.ResultProxy
|
||||
"""
|
||||
s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(address, address, limit, offset)
|
||||
|
||||
order_by = 'DESC'
|
||||
if oldest:
|
||||
order_by = 'ASC'
|
||||
|
||||
if block_offset:
|
||||
if block_limit:
|
||||
s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
|
||||
else:
|
||||
s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
|
||||
|
||||
else:
|
||||
s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)
|
||||
|
||||
r = session.execute(s)
|
||||
return r
|
||||
|
||||
|
||||
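All of the account variants interpolate the address straight into the statement text. A parameterized equivalent (a sketch of an alternative, not what this changeset does; it assumes the same function parameters as above) would bind the values instead:

# sketch: bound parameters instead of str.format interpolation,
# using the same session/address/limit/offset as the enclosing function
from sqlalchemy import text

s = text("SELECT block_number, tx_index FROM tx "
         "WHERE sender = :a OR recipient = :a "
         "ORDER BY block_number DESC, tx_index DESC LIMIT :limit OFFSET :offset")
r = session.execute(s, {'a': address, 'limit': limit, 'offset': offset})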
@@ -7,7 +7,7 @@ Create Date: 2021-04-01 08:10:29.156243
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from chainsyncer.db.migrations.sqlalchemy import (
|
||||
from chainsyncer.db.migrations.default.export import (
|
||||
chainsyncer_upgrade,
|
||||
chainsyncer_downgrade,
|
||||
)
|
||||
|
||||
@@ -100,3 +100,4 @@ class SessionBase(Model):
|
||||
logg.debug('destroying session {}'.format(session_key))
|
||||
session.commit()
|
||||
session.close()
|
||||
del SessionBase.localsessions[session_key]
|
||||
|
||||
@@ -4,6 +4,12 @@ import json
|
||||
import re
|
||||
import base64
|
||||
|
||||
# external imports
|
||||
from hexathon import (
|
||||
add_0x,
|
||||
strip_0x,
|
||||
)
|
||||
|
||||
# local imports
|
||||
from cic_cache.cache import (
|
||||
BloomCache,
|
||||
@@ -11,10 +17,12 @@ from cic_cache.cache import (
|
||||
)
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
#logg = logging.getLogger()
|
||||
|
||||
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
|
||||
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'
|
||||
re_transactions_all_data = r'/txa/(\d+)/(\d+)/?'
|
||||
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
|
||||
re_transactions_all_data = r'/txa/(\d+)?/?(\d+)/?'
|
||||
re_transactions_account_data = r'/txa/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
|
||||
|
||||
DEFAULT_LIMIT = 100
|
||||
|
||||
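The revised route patterns make the offset and limit path segments optional and capture them in fixed groups. An example of what the new account data route matches (standalone illustration):

# illustration: groups captured by the account data route above
import re

re_transactions_account_data = r'/txa/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'

m = re.match(re_transactions_account_data, '/txa/user/0xdeadbeef/10/50')
assert m is not None
assert m[1] == '0xdeadbeef'   # address, 0x prefix optional
assert m[4] == '10'           # offset
assert m[6] == '50'           # limit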
@@ -24,15 +32,13 @@ def process_transactions_account_bloom(session, env):
|
||||
if not r:
|
||||
return None
|
||||
|
||||
address = r[1]
|
||||
if r[2] == None:
|
||||
address = '0x' + address
|
||||
offset = DEFAULT_LIMIT
|
||||
address = strip_0x(r[1])
|
||||
offset = 0
|
||||
if r.lastindex > 2:
|
||||
offset = r[3]
|
||||
limit = 0
|
||||
if r.lastindex > 3:
|
||||
limit = r[4]
|
||||
offset = r[4]
|
||||
limit = DEFAULT_LIMIT
|
||||
if r.lastindex > 4:
|
||||
limit = r[6]
|
||||
|
||||
c = BloomCache(session)
|
||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
|
||||
@@ -87,13 +93,14 @@ def process_transactions_all_data(session, env):
|
||||
if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
|
||||
return None
|
||||
|
||||
offset = r[1]
|
||||
end = r[2]
|
||||
logg.debug('got data request {}'.format(env))
|
||||
block_offset = r[1]
|
||||
block_end = r[2]
|
||||
if int(r[2]) < int(r[1]):
|
||||
raise ValueError('cart before the horse, dude')
|
||||
|
||||
c = DataCache(session)
|
||||
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, end)
|
||||
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(0, 0, block_offset, block_end, oldest=True) # oldest needs to be settable
|
||||
|
||||
for r in tx_cache:
|
||||
r['date_block'] = r['date_block'].timestamp()
|
||||
@@ -108,3 +115,38 @@ def process_transactions_all_data(session, env):
|
||||
j = json.dumps(o)
|
||||
|
||||
return ('application/json', j.encode('utf-8'),)
|
||||
|
||||
|
||||
def process_transactions_account_data(session, env):
|
||||
r = re.match(re_transactions_account_data, env.get('PATH_INFO'))
|
||||
if not r:
|
||||
return None
|
||||
if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
|
||||
return None
|
||||
|
||||
logg.debug('got data request {}'.format(env))
|
||||
address = strip_0x(r[1])
|
||||
#if r[2] == None:
|
||||
# address = add_0x(address)
|
||||
offset = 0
|
||||
if r.lastindex > 2:
|
||||
offset = r[4]
|
||||
limit = DEFAULT_LIMIT
|
||||
if r.lastindex > 4:
|
||||
limit = r[6]
|
||||
|
||||
c = DataCache(session)
|
||||
(lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data(address, offset, limit)
|
||||
|
||||
for r in tx_cache:
|
||||
r['date_block'] = r['date_block'].timestamp()
|
||||
|
||||
o = {
|
||||
'low': lowest_block,
|
||||
'high': highest_block,
|
||||
'data': tx_cache,
|
||||
}
|
||||
|
||||
j = json.dumps(o)
|
||||
|
||||
return ('application/json', j.encode('utf-8'),)
|
||||
|
||||
@@ -8,41 +8,31 @@ import base64
|
||||
import confini
|
||||
|
||||
# local imports
|
||||
import cic_cache.cli
|
||||
from cic_cache.db import dsn_from_config
|
||||
from cic_cache.db.models.base import SessionBase
|
||||
from cic_cache.runnable.daemons.query import (
|
||||
process_transactions_account_bloom,
|
||||
process_transactions_account_data,
|
||||
process_transactions_all_bloom,
|
||||
process_transactions_all_data,
|
||||
)
|
||||
import cic_cache.cli
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
|
||||
migrationsdir = os.path.join(dbdir, 'migrations')
|
||||
|
||||
config_dir = os.path.join('/usr/local/etc/cic-cache')
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||
arg_flags = cic_cache.cli.argflag_std_read
|
||||
local_arg_flags = cic_cache.cli.argflag_local_sync | cic_cache.cli.argflag_local_task
|
||||
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
args = argparser.parse_args()
|
||||
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
config = confini.Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config:\n{}'.format(config))
|
||||
# process config
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
|
||||
|
||||
# connect to database
|
||||
dsn = dsn_from_config(config)
|
||||
SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
|
||||
|
||||
@@ -58,6 +48,7 @@ def application(env, start_response):
|
||||
process_transactions_all_data,
|
||||
process_transactions_all_bloom,
|
||||
process_transactions_account_bloom,
|
||||
process_transactions_account_data,
|
||||
]:
|
||||
r = None
|
||||
try:
|
||||
|
||||
@@ -9,6 +9,7 @@ import celery
|
||||
import confini
|
||||
|
||||
# local imports
|
||||
import cic_cache.cli
|
||||
from cic_cache.db import dsn_from_config
|
||||
from cic_cache.db.models.base import SessionBase
|
||||
from cic_cache.tasks.tx import *
|
||||
@@ -16,35 +17,20 @@ from cic_cache.tasks.tx import *
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
config_dir = os.path.join('/usr/local/etc/cic-cache')
|
||||
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||
argparser.add_argument('-q', type=str, default='cic-cache', help='queue name for worker tasks')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||
|
||||
# process args
|
||||
arg_flags = cic_cache.cli.argflag_std_base
|
||||
local_arg_flags = cic_cache.cli.argflag_local_task
|
||||
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
args = argparser.parse_args()
|
||||
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
config = confini.Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
# process config
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
|
||||
|
||||
# connect to database
|
||||
dsn = dsn_from_config(config)
|
||||
SessionBase.connect(dsn)
|
||||
|
||||
# verify database connection with minimal sanity query
|
||||
#session = SessionBase.create_session()
|
||||
#session.execute('select version_num from alembic_version')
|
||||
#session.close()
|
||||
|
||||
# set up celery
|
||||
current_app = celery.Celery(__name__)
|
||||
|
||||
@@ -87,9 +73,9 @@ def main():
|
||||
elif args.v:
|
||||
argv.append('--loglevel=INFO')
|
||||
argv.append('-Q')
|
||||
argv.append(args.q)
|
||||
argv.append(config.get('CELERY_QUEUE'))
|
||||
argv.append('-n')
|
||||
argv.append(args.q)
|
||||
argv.append(config.get('CELERY_QUEUE'))
|
||||
|
||||
current_app.worker_main(argv)
|
||||
|
||||
|
||||
@@ -8,15 +8,7 @@ import sys
|
||||
import re
|
||||
|
||||
# external imports
|
||||
import confini
|
||||
import celery
|
||||
import sqlalchemy
|
||||
import rlp
|
||||
import cic_base.config
|
||||
import cic_base.log
|
||||
import cic_base.argparse
|
||||
import cic_base.rpc
|
||||
from cic_base.eth.syncer import chain_interface
|
||||
from cic_eth_registry import CICRegistry
|
||||
from cic_eth_registry.error import UnknownContractError
|
||||
from chainlib.chain import ChainSpec
|
||||
@@ -34,6 +26,7 @@ from chainsyncer.driver.history import HistorySyncer
|
||||
from chainsyncer.db.models.base import SessionBase
|
||||
|
||||
# local imports
|
||||
import cic_cache.cli
|
||||
from cic_cache.db import (
|
||||
dsn_from_config,
|
||||
add_tag,
|
||||
@@ -43,32 +36,36 @@ from cic_cache.runnable.daemons.filters import (
|
||||
FaucetFilter,
|
||||
)
|
||||
|
||||
script_dir = os.path.realpath(os.path.dirname(__file__))
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
def add_block_args(argparser):
|
||||
argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync')
|
||||
argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
|
||||
return argparser
|
||||
# process args
|
||||
arg_flags = cic_cache.cli.argflag_std_base
|
||||
local_arg_flags = cic_cache.cli.argflag_local_sync
|
||||
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
args = argparser.parse_args()
|
||||
|
||||
# process config
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
|
||||
|
||||
logg = cic_base.log.create()
|
||||
argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
|
||||
argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
|
||||
args = cic_base.argparse.parse(argparser, logg)
|
||||
config = cic_base.config.create(args.c, args, args.env_prefix)
|
||||
|
||||
config.add(args.history_start, 'SYNCER_HISTORY_START', True)
|
||||
config.add(args.no_history, '_NO_HISTORY', True)
|
||||
|
||||
cic_base.config.log(config)
|
||||
|
||||
# connect to database
|
||||
dsn = dsn_from_config(config)
|
||||
|
||||
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
|
||||
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||
# set up rpc
|
||||
rpc = cic_cache.cli.RPC.from_config(config)
|
||||
conn = rpc.get_default()
|
||||
|
||||
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
|
||||
# set up chain provisions
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
|
||||
registry = None
|
||||
try:
|
||||
registry = cic_cache.cli.connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
|
||||
except UnknownContractError as e:
|
||||
logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
|
||||
sys.exit(1)
|
||||
logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))
|
||||
|
||||
|
||||
def register_filter_tags(filters, session):
|
||||
@@ -95,14 +92,12 @@ def main():
|
||||
|
||||
syncers = []
|
||||
|
||||
#if SQLBackend.first(chain_spec):
|
||||
# backend = SQLBackend.initial(chain_spec, block_offset)
|
||||
syncer_backends = SQLBackend.resume(chain_spec, block_offset)
|
||||
|
||||
if len(syncer_backends) == 0:
|
||||
initial_block_start = config.get('SYNCER_HISTORY_START')
|
||||
initial_block_start = config.get('SYNCER_OFFSET')
|
||||
initial_block_offset = block_offset
|
||||
if config.get('_NO_HISTORY'):
|
||||
if config.get('SYNCER_NO_HISTORY'):
|
||||
initial_block_start = block_offset
|
||||
initial_block_offset += 1
|
||||
syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
|
||||
@@ -112,10 +107,10 @@ def main():
|
||||
logg.info('resuming sync session {}'.format(syncer_backend))
|
||||
|
||||
for syncer_backend in syncer_backends:
|
||||
syncers.append(HistorySyncer(syncer_backend, chain_interface))
|
||||
syncers.append(HistorySyncer(syncer_backend, cic_cache.cli.chain_interface))
|
||||
|
||||
syncer_backend = SQLBackend.live(chain_spec, block_offset+1)
|
||||
syncers.append(HeadSyncer(syncer_backend, chain_interface))
|
||||
syncers.append(HeadSyncer(syncer_backend, cic_cache.cli.chain_interface))
|
||||
|
||||
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
||||
if trusted_addresses_src == None:
|
||||
|
||||
@@ -2,14 +2,17 @@
|
||||
import celery
|
||||
|
||||
# local imports
|
||||
from cic_cache.cache import BloomCache
|
||||
from cic_cache.cache import (
|
||||
BloomCache,
|
||||
DataCache,
|
||||
)
|
||||
from cic_cache.db.models.base import SessionBase
|
||||
|
||||
celery_app = celery.current_app
|
||||
|
||||
|
||||
@celery_app.task(bind=True)
|
||||
def tx_filter(self, offset, limit, address=None, encoding='hex'):
|
||||
def tx_filter(self, offset, limit, address=None, oldest=False, encoding='hex'):
|
||||
queue = self.request.delivery_info.get('routing_key')
|
||||
|
||||
session = SessionBase.create_session()
|
||||
@@ -17,9 +20,9 @@ def tx_filter(self, offset, limit, address=None, encoding='hex'):
|
||||
c = BloomCache(session)
|
||||
b = None
|
||||
if address == None:
|
||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
|
||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit, oldest=oldest)
|
||||
else:
|
||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
|
||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit, oldest=oldest)
|
||||
|
||||
session.close()
|
||||
|
||||
@@ -35,4 +38,17 @@ def tx_filter(self, offset, limit, address=None, encoding='hex'):
|
||||
return o
|
||||
|
||||
|
||||
@celery_app.task(bind=True)
|
||||
def tx_filter_content(self, offset, limit, address=None, block_offset=None, block_limit=None, oldest=False, encoding='hex'):
|
||||
session = SessionBase.create_session()
|
||||
|
||||
c = DataCache(session)
|
||||
b = None
|
||||
if address == None:
|
||||
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
|
||||
else:
|
||||
(lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data_index(address, offset, limit, block_offset=block_offset, block_limit=block_limit)
|
||||
|
||||
session.close()
|
||||
|
||||
return (lowest_block, highest_block, tx_cache,)
|
||||
|
||||
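tx_filter_content is the task behind Api.list_content shown at the top of this diff; when no address is given it falls through to DataCache.load_transactions_with_data. A dispatch sketch, assuming a broker, a result backend and a worker listening on the cic-cache queue:

# sketch: dispatching the content filter task directly
import celery

s = celery.signature(
    'cic_cache.tasks.tx.tx_filter_content',
    [0, 100, None, None, None, False],   # offset, limit, address, block_offset, block_limit, oldest
    queue='cic-cache',
)
t = s.apply_async()
(lowest_block, highest_block, tx_cache) = t.get()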
@@ -4,7 +4,7 @@ import semver
|
||||
version = (
|
||||
0,
|
||||
2,
|
||||
0,
|
||||
1,
|
||||
'alpha.2',
|
||||
)
|
||||
|
||||
|
||||
@@ -1,2 +0,0 @@
[bancor]
dir =
@@ -1,3 +0,0 @@
[celery]
broker_url = redis:///
result_url = redis:///
@@ -1,4 +0,0 @@
[cic]
registry_address =
chain_spec =
trust_address =
@@ -1,9 +0,0 @@
[database]
NAME=cic_cache
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0
@@ -1,3 +0,0 @@
[bancor]
registry_address =
dir = /usr/local/share/bancor
@@ -1,4 +0,0 @@
[cic]
chain_spec =
registry_address =
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
@@ -1,9 +0,0 @@
[database]
NAME=cic_cache
USER=grassroots
PASSWORD=
HOST=localhost
PORT=63432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0
@@ -1,2 +0,0 @@
[eth]
provider = ws://localhost:8545
@@ -1,3 +0,0 @@
[syncer]
loop_interval = 5
history_start = 0
@@ -1,52 +1,32 @@
|
||||
FROM python:3.8.6-slim-buster
|
||||
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
|
||||
|
||||
#COPY --from=0 /usr/local/share/cic/solidity/ /usr/local/share/cic/solidity/
|
||||
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
|
||||
|
||||
WORKDIR /usr/src/cic-cache
|
||||
COPY requirements.txt .
|
||||
|
||||
ARG pip_extra_index_url_flag='--index https://pypi.org/simple --extra-index-url https://pip.grassrootseconomics.net:8433'
|
||||
ARG root_requirement_file='requirements.txt'
|
||||
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||
ARG EXTRA_PIP_ARGS=""
|
||||
ARG PIP_INDEX_URL="https://pypi.org/simple"
|
||||
|
||||
#RUN apk update && \
|
||||
# apk add gcc musl-dev gnupg libpq
|
||||
#RUN apk add postgresql-dev
|
||||
#RUN apk add linux-headers
|
||||
#RUN apk add libffi-dev
|
||||
RUN apt-get update && \
|
||||
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
pip install --index-url $PIP_INDEX_URL \
|
||||
--pre \
|
||||
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||
-r requirements.txt
|
||||
|
||||
# Copy shared requirements from top of mono-repo
|
||||
RUN echo "copying root req file ${root_requirement_file}"
|
||||
RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
|
||||
COPY . .
|
||||
|
||||
COPY cic-cache/requirements.txt ./
|
||||
COPY cic-cache/setup.cfg \
|
||||
cic-cache/setup.py \
|
||||
./
|
||||
COPY cic-cache/cic_cache/ ./cic_cache/
|
||||
COPY cic-cache/scripts/ ./scripts/
|
||||
COPY cic-cache/test_requirements.txt ./
|
||||
RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
|
||||
RUN pip install $pip_extra_index_url_flag .
|
||||
RUN pip install .[server]
|
||||
|
||||
COPY cic-cache/tests/ ./tests/
|
||||
#COPY db/ cic-cache/db
|
||||
#RUN apk add postgresql-client
|
||||
|
||||
# ini files in config directory defines the configurable parameters for the application
|
||||
# they can all be overridden by environment variables
|
||||
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||
COPY cic-cache/config/ /usr/local/etc/cic-cache/
|
||||
RUN python setup.py install
|
||||
|
||||
# for db migrations
|
||||
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
|
||||
COPY cic-cache/cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
|
||||
COPY ./aux/wait-for-it/wait-for-it.sh ./
|
||||
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
|
||||
|
||||
COPY cic-cache/docker/start_tracker.sh ./start_tracker.sh
|
||||
COPY cic-cache/docker/db.sh ./db.sh
|
||||
COPY /docker/start_tracker.sh ./start_tracker.sh
|
||||
COPY /docker/db.sh ./db.sh
|
||||
RUN chmod 755 ./*.sh
|
||||
# Tracker
|
||||
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
|
||||
# Server
|
||||
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
|
||||
ENTRYPOINT []
|
||||
|
||||
10
apps/cic-cache/docker/run_tests.sh
Normal file
@@ -0,0 +1,10 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
|
||||
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
|
||||
-r test_requirements.txt
|
||||
|
||||
export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
cic-base==0.1.3a3+build.984b5cff
|
||||
alembic==1.4.2
|
||||
confini~=0.3.6rc3
|
||||
confini>=0.3.6rc4,<0.5.0
|
||||
uwsgi==2.0.19.1
|
||||
moolb~=0.1.0
|
||||
cic-eth-registry~=0.5.6a1
|
||||
moolb~=0.1.1b2
|
||||
cic-eth-registry~=0.6.1a5
|
||||
SQLAlchemy==1.3.20
|
||||
semver==2.13.0
|
||||
psycopg2==2.8.6
|
||||
celery==4.4.7
|
||||
redis==3.5.3
|
||||
rlp==2.0.1
|
||||
chainsyncer[sql]~=0.0.3a3
|
||||
erc20-faucet~=0.2.2a1
|
||||
chainsyncer[sql]>=0.0.6a3,<0.1.0
|
||||
erc20-faucet>=0.3.2a2, <0.4.0
|
||||
chainlib-eth>=0.0.9a14,<0.1.0
|
||||
eth-address-index>=0.2.3a4,<0.3.0
|
||||
okota>=0.2.4a6,<0.3.0
|
||||
|
||||
@@ -1,14 +1,19 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
|
||||
# standard imports
|
||||
import os
|
||||
import argparse
|
||||
import logging
|
||||
import re
|
||||
|
||||
# external imports
|
||||
import alembic
|
||||
from alembic.config import Config as AlembicConfig
|
||||
import confini
|
||||
|
||||
# local imports
|
||||
from cic_cache.db import dsn_from_config
|
||||
import cic_cache.cli
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
@@ -17,31 +22,29 @@ logg = logging.getLogger()
|
||||
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
|
||||
migrationsdir = os.path.join(dbdir, 'migrations')
|
||||
configdir = os.path.join(rootdir, 'cic_cache', 'data', 'config')
|
||||
|
||||
config_dir = os.path.join('/usr/local/etc/cic-cache')
|
||||
#config_dir = os.path.join('/usr/local/etc/cic-cache')
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
|
||||
arg_flags = cic_cache.cli.argflag_std_base
|
||||
local_arg_flags = cic_cache.cli.argflag_local_sync
|
||||
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
|
||||
argparser.add_argument('-f', action='store_true', help='force action')
|
||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||
argparser.add_argument('-f', '--force', action='store_true', help='force action')
|
||||
argparser.add_argument('--migrations-dir', dest='migrations_dir', type=str, help='migrations directory')
|
||||
args = argparser.parse_args()
|
||||
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
extra_args = {
|
||||
'reset': None,
|
||||
'force': None,
|
||||
'migrations_dir': None,
|
||||
}
|
||||
|
||||
config = confini.Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config:\n{}'.format(config))
|
||||
# process config
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)
|
||||
|
||||
migrations_dir = os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE'))
|
||||
migrations_dir = os.path.join(config.get('_MIGRATIONS_DIR'), config.get('DATABASE_ENGINE'))
|
||||
if not os.path.isdir(migrations_dir):
|
||||
logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
|
||||
migrations_dir = os.path.join(args.migrations_dir, 'default')
|
||||
|
||||
@@ -23,11 +23,13 @@ licence_files =
|
||||
|
||||
[options]
|
||||
python_requires = >= 3.6
|
||||
include_package_data = True
|
||||
packages =
|
||||
cic_cache
|
||||
cic_cache.tasks
|
||||
cic_cache.db
|
||||
cic_cache.db.models
|
||||
cic_cache.cli
|
||||
cic_cache.runnable
|
||||
cic_cache.runnable.daemons
|
||||
cic_cache.runnable.daemons.filters
|
||||
@@ -39,3 +41,4 @@ console_scripts =
|
||||
cic-cache-trackerd = cic_cache.runnable.daemons.tracker:main
|
||||
cic-cache-serverd = cic_cache.runnable.daemons.server:main
|
||||
cic-cache-taskerd = cic_cache.runnable.daemons.tasker:main
|
||||
cic-cache-list = cic_cache.runable.list:main
|
||||
|
||||
@@ -6,5 +6,5 @@ sqlparse==0.4.1
|
||||
pytest-celery==0.0.0a1
|
||||
eth_tester==0.5.0b3
|
||||
py-evm==0.3.0a20
|
||||
cic_base[full]==0.1.3a3+build.984b5cff
|
||||
sarafu-faucet~=0.0.4a1
|
||||
sarafu-faucet~=0.0.7a1
|
||||
erc20-transfer-authorization>=0.3.5a1,<0.4.0
|
||||
|
||||
40
apps/cic-cache/tests/cli/test_cli_args.py
Normal file
@@ -0,0 +1,40 @@
|
||||
# standard imports
|
||||
import os
|
||||
|
||||
# external imports
|
||||
import chainlib.cli
|
||||
|
||||
# local imports
|
||||
import cic_cache.cli
|
||||
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
config_dir = os.path.join(script_dir, '..', 'testdata', 'config')
|
||||
|
||||
|
||||
def test_argumentparserto_config():
|
||||
|
||||
argparser = cic_cache.cli.ArgumentParser()
|
||||
|
||||
local_flags = 0xffff
|
||||
argparser.process_local_flags(local_flags)
|
||||
argparser.add_argument('--foo', type=str)
|
||||
args = argparser.parse_args([
|
||||
'-q', 'baz',
|
||||
'--offset', '13',
|
||||
'--no-history',
|
||||
'-r','0xdeadbeef',
|
||||
'-vv',
|
||||
'--foo', 'bar',
|
||||
])
|
||||
|
||||
extra_args = {
|
||||
'foo': '_BARBARBAR',
|
||||
}
|
||||
config = cic_cache.cli.Config.from_args(args, chainlib.cli.argflag_std_base, local_flags, extra_args=extra_args, base_config_dir=config_dir)
|
||||
|
||||
assert config.get('_BARBARBAR') == 'bar'
|
||||
assert config.get('CELERY_QUEUE') == 'baz'
|
||||
assert config.get('SYNCER_NO_HISTORY') == True
|
||||
assert config.get('SYNCER_OFFSET') == 13
|
||||
assert config.get('CIC_REGISTRY_ADDRESS') == '0xdeadbeef'
|
||||
|
||||
17
apps/cic-cache/tests/cli/test_cli_celery.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# standard imports
|
||||
import tempfile
|
||||
|
||||
# local imports
|
||||
import cic_cache.cli
|
||||
|
||||
|
||||
def test_cli_celery():
|
||||
cf = tempfile.mkdtemp()
|
||||
|
||||
config = {
|
||||
'CELERY_RESULT_URL': 'filesystem://' + cf,
|
||||
}
|
||||
cic_cache.cli.CeleryApp.from_config(config)
|
||||
|
||||
config['CELERY_BROKER_URL'] = 'filesystem://' + cf
|
||||
cic_cache.cli.CeleryApp.from_config(config)
|
||||
68
apps/cic-cache/tests/cli/test_cli_chain.py
Normal file
@@ -0,0 +1,68 @@
|
||||
# external imports
|
||||
import pytest
|
||||
from chainlib.eth.gas import (
|
||||
Gas,
|
||||
RPCGasOracle,
|
||||
)
|
||||
from chainlib.eth.nonce import RPCNonceOracle
|
||||
from chainlib.eth.block import (
|
||||
block_latest,
|
||||
Block,
|
||||
)
|
||||
from chainlib.eth.pytest.fixtures_chain import default_chain_spec
|
||||
from chainlib.eth.pytest.fixtures_ethtester import *
|
||||
from cic_eth_registry.pytest.fixtures_contracts import *
|
||||
from hexathon import add_0x
|
||||
|
||||
# local imports
|
||||
import cic_cache.cli
|
||||
|
||||
|
||||
@pytest.mark.xfail()
|
||||
def test_cli_rpc(
|
||||
eth_rpc,
|
||||
eth_signer,
|
||||
default_chain_spec,
|
||||
):
|
||||
config = {
|
||||
'CHAIN_SPEC': str(default_chain_spec),
|
||||
'RPC_HTTP_PROVIDER': 'http://localhost:8545',
|
||||
}
|
||||
rpc = cic_cache.cli.RPC.from_config(config, default_label='foo')
|
||||
conn = rpc.get_by_label('foo')
|
||||
#o = block_latest()
|
||||
#conn.do(o)
|
||||
|
||||
|
||||
def test_cli_chain(
|
||||
default_chain_spec,
|
||||
eth_rpc,
|
||||
eth_signer,
|
||||
contract_roles,
|
||||
):
|
||||
ifc = cic_cache.cli.EthChainInterface()
|
||||
|
||||
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
|
||||
gas_oracle = RPCGasOracle(conn=eth_rpc)
|
||||
c = Gas(default_chain_spec, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, signer=eth_signer)
|
||||
recipient = add_0x(os.urandom(20).hex())
|
||||
(tx_hash, o) = c.create(contract_roles['CONTRACT_DEPLOYER'], recipient, 1024)
|
||||
r = eth_rpc.do(o)
|
||||
|
||||
o = ifc.tx_receipt(r)
|
||||
r = eth_rpc.do(o)
|
||||
assert r['status'] == 1
|
||||
|
||||
o = ifc.block_by_number(1)
|
||||
block_src = eth_rpc.do(o)
|
||||
block = ifc.block_from_src(block_src)
|
||||
assert block.number == 1
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
assert block_src['gasUsed'] == 21000
|
||||
assert block_src['gas_used'] == 21000
|
||||
|
||||
block_src = ifc.src_normalize(block_src)
|
||||
assert block_src['gasUsed'] == 21000
|
||||
assert block_src['gas_used'] == 21000
|
||||
|
||||
@@ -5,9 +5,12 @@ import datetime
|
||||
|
||||
# external imports
|
||||
import pytest
|
||||
import moolb
|
||||
|
||||
# local imports
|
||||
from cic_cache import db
|
||||
from cic_cache import BloomCache
|
||||
from cic_cache.cache import DEFAULT_FILTER_SIZE
|
||||
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
root_dir = os.path.dirname(script_dir)
|
||||
@@ -61,7 +64,6 @@ def txs(
|
||||
dt.timestamp(),
|
||||
)
|
||||
|
||||
|
||||
tx_number = 42
|
||||
tx_hash_second = '0x' + os.urandom(32).hex()
|
||||
tx_signed_second = '0x' + os.urandom(128).hex()
|
||||
@@ -90,6 +92,44 @@ def txs(
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def more_txs(
|
||||
init_database,
|
||||
list_defaults,
|
||||
list_actors,
|
||||
list_tokens,
|
||||
txs,
|
||||
):
|
||||
|
||||
session = init_database
|
||||
|
||||
tx_number = 666
|
||||
tx_hash = '0x' + os.urandom(32).hex()
|
||||
tx_signed = '0x' + os.urandom(128).hex()
|
||||
nonce = 3
|
||||
|
||||
dt = datetime.datetime.utcnow()
|
||||
dt += datetime.timedelta(hours=1)
|
||||
db.add_transaction(
|
||||
session,
|
||||
tx_hash,
|
||||
list_defaults['block']+2,
|
||||
tx_number,
|
||||
list_actors['alice'],
|
||||
list_actors['diane'],
|
||||
list_tokens['bar'],
|
||||
list_tokens['bar'],
|
||||
2048,
|
||||
4096,
|
||||
False,
|
||||
dt.timestamp(),
|
||||
)
|
||||
|
||||
session.commit()
|
||||
|
||||
return [tx_hash] + txs
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def tag_txs(
|
||||
init_database,
|
||||
@@ -101,3 +141,7 @@ def tag_txs(
|
||||
|
||||
db.tag_transaction(init_database, txs[1], 'taag', domain='test')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def zero_filter():
|
||||
return moolb.Bloom(DEFAULT_FILTER_SIZE, 3)
|
||||
|
||||
@@ -10,6 +10,7 @@ from sqlalchemy import text
from chainlib.eth.tx import Tx
from chainlib.eth.block import Block
from chainlib.chain import ChainSpec
from chainlib.eth.error import RequestMismatchException
from hexathon import (
        strip_0x,
        add_0x,
@@ -18,10 +19,21 @@ from hexathon import (
# local imports
from cic_cache.db import add_tag
from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
from cic_cache.runnable.daemons.filters.base import TagSyncFilter

logg = logging.getLogger()


def test_base_filter_str(
        init_database,
        ):
    f = TagSyncFilter('foo')
    assert 'foo' == str(f)
    f = TagSyncFilter('foo', domain='bar')
    assert 'bar.foo' == str(f)


def test_erc20_filter(
        eth_rpc,
        foo_token,
@@ -67,3 +79,95 @@ def test_erc20_filter(
    s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
    r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
    assert r[0] == tx.hash


def test_erc20_filter_nocontract(
        eth_rpc,
        foo_token,
        init_database,
        list_defaults,
        list_actors,
        tags,
        ):

    chain_spec = ChainSpec('foo', 'bar', 42, 'baz')

    fltr = ERC20TransferFilter(chain_spec)
    add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)

    # incomplete args
    data = 'a9059cbb'
    data += strip_0x(list_actors['alice'])
    data += '1000'.ljust(64, '0')
    block = Block({
            'hash': os.urandom(32).hex(),
            'number': 42,
            'timestamp': datetime.datetime.utcnow().timestamp(),
            'transactions': [],
            })

    tx = Tx({
            'to': os.urandom(20).hex(),
            'from': list_actors['bob'],
            'data': data,
            'value': 0,
            'hash': os.urandom(32).hex(),
            'nonce': 13,
            'gasPrice': 10000000,
            'gas': 123456,
            })
    block.txs.append(tx)
    tx.block = block

    assert not fltr.filter(eth_rpc, block, tx, db_session=init_database)


@pytest.mark.parametrize(
        'contract_method,contract_input,expected_exception',
        [
            ('a9059cbb', os.urandom(32).hex(), ValueError), # not enough args
            ('a9059cbb', os.urandom(31).hex(), ValueError), # wrong arg boundary
            ('a9059cbc', os.urandom(64).hex(), RequestMismatchException), # wrong method
            ],
        )
def test_erc20_filter_bogus(
        eth_rpc,
        foo_token,
        init_database,
        list_defaults,
        list_actors,
        tags,
        contract_method,
        contract_input,
        expected_exception,
        ):

    chain_spec = ChainSpec('foo', 'bar', 42, 'baz')

    fltr = ERC20TransferFilter(chain_spec)
    add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)

    # incomplete args
    data = contract_method
    data += contract_input
    block = Block({
            'hash': os.urandom(32).hex(),
            'number': 42,
            'timestamp': datetime.datetime.utcnow().timestamp(),
            'transactions': [],
            })

    tx = Tx({
            'to': foo_token,
            'from': list_actors['bob'],
            'data': data,
            'value': 0,
            'hash': os.urandom(32).hex(),
            'nonce': 13,
            'gasPrice': 10000000,
            'gas': 123456,
            })
    block.txs.append(tx)
    tx.block = block

    assert not fltr.filter(eth_rpc, block, tx, db_session=init_database)

@@ -8,6 +8,7 @@ import json
import pytest

# local imports
from cic_cache import db
from cic_cache import BloomCache
from cic_cache.cache import DataCache

@@ -18,7 +19,6 @@ def test_cache(
        init_database,
        list_defaults,
        list_actors,
        list_tokens,
        txs,
        ):

@@ -37,9 +37,6 @@ def test_cache(

def test_cache_data(
        init_database,
        list_defaults,
        list_actors,
        list_tokens,
        txs,
        tag_txs,
        ):
@@ -47,10 +44,209 @@ def test_cache_data(
    session = init_database

    c = DataCache(session)
    b = c.load_transactions_with_data(410000, 420000)
    b = c.load_transactions_with_data(0, 3) #410000, 420000) #, 100, block_offset=410000, block_limit=420000, oldest=True)

    assert len(b[2]) == 2
    assert b[2][0]['tx_hash'] == txs[1]
    assert b[2][1]['tx_type'] == 'unknown'
    assert b[2][0]['tx_type'] == 'test.taag'
    assert b[2][0]['tx_hash'] == txs[0]
    assert b[2][0]['tx_type'] == 'unknown'
    assert b[2][1]['tx_type'] == 'test.taag'


def test_cache_ranges(
        init_database,
        list_defaults,
        list_actors,
        list_tokens,
        more_txs,
        ):

    session = init_database

    oldest = list_defaults['block'] - 1
    mid = list_defaults['block']
    newest = list_defaults['block'] + 2

    c = BloomCache(session)
    b = c.load_transactions(0, 100)
    assert b[0] == oldest
    assert b[1] == newest

    b = c.load_transactions(1, 2)
    assert b[0] == oldest
    assert b[1] == mid

    b = c.load_transactions(0, 2)
    assert b[0] == mid
    assert b[1] == newest

    b = c.load_transactions(0, 1)
    assert b[0] == newest
    assert b[1] == newest

    b = c.load_transactions(0, 100, oldest=True)
    assert b[0] == oldest
    assert b[1] == newest

    b = c.load_transactions(0, 100, block_offset=list_defaults['block'])
    assert b[0] == mid
    assert b[1] == newest

    b = c.load_transactions(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
    assert b[0] == oldest
    assert b[1] == mid

    b = c.load_transactions(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'], oldest=True)
    assert b[0] == oldest
    assert b[1] == mid

    # now check when supplying account
    b = c.load_transactions_account(list_actors['alice'], 0, 100)
    assert b[0] == oldest
    assert b[1] == newest

    b = c.load_transactions_account(list_actors['bob'], 0, 100)
    assert b[0] == mid
    assert b[1] == mid

    b = c.load_transactions_account(list_actors['diane'], 0, 100)
    assert b[0] == oldest
    assert b[1] == newest

    # add block filter to the mix
    b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
    assert b[0] == mid
    assert b[1] == newest

    b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
    assert b[0] == mid
    assert b[1] == newest

    b = c.load_transactions_account(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
    assert b[0] == mid
    assert b[1] == mid

    b = c.load_transactions_account(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
    assert b[0] == oldest
    assert b[1] == oldest


def test_cache_ranges_data(
        init_database,
        list_defaults,
        list_actors,
        list_tokens,
        more_txs,
        ):

    session = init_database

    oldest = list_defaults['block'] - 1
    mid = list_defaults['block']
    newest = list_defaults['block'] + 2

    c = DataCache(session)

    b = c.load_transactions_with_data(0, 100)
    assert b[0] == oldest
    assert b[1] == newest
    assert len(b[2]) == 3
    assert b[2][0]['tx_hash'] == more_txs[0]
    assert b[2][2]['tx_hash'] == more_txs[2]

    b = c.load_transactions_with_data(1, 2)
    assert b[0] == oldest
    assert b[1] == mid
    assert len(b[2]) == 2
    assert b[2][0]['tx_hash'] == more_txs[1]
    assert b[2][1]['tx_hash'] == more_txs[2]

    b = c.load_transactions_with_data(0, 2)
    assert b[0] == mid
    assert b[1] == newest
    assert len(b[2]) == 2
    assert b[2][0]['tx_hash'] == more_txs[0]
    assert b[2][1]['tx_hash'] == more_txs[1]

    b = c.load_transactions_with_data(0, 1)
    assert b[0] == newest
    assert b[1] == newest
    assert len(b[2]) == 1
    assert b[2][0]['tx_hash'] == more_txs[0]

    b = c.load_transactions_with_data(0, 100, oldest=True)
    assert b[0] == oldest
    assert b[1] == newest
    assert len(b[2]) == 3
    assert b[2][0]['tx_hash'] == more_txs[2]
    assert b[2][1]['tx_hash'] == more_txs[1]
    assert b[2][2]['tx_hash'] == more_txs[0]

    b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'])
    assert b[0] == mid
    assert b[1] == newest
    assert len(b[2]) == 2
    assert b[2][0]['tx_hash'] == more_txs[0]
    assert b[2][1]['tx_hash'] == more_txs[1]

    b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
    assert b[0] == oldest
    assert b[1] == mid
    assert len(b[2]) == 2
    assert b[2][0]['tx_hash'] == more_txs[1]
    assert b[2][1]['tx_hash'] == more_txs[2]

    b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'], oldest=True)
    assert b[0] == oldest
    assert b[1] == mid
    assert len(b[2]) == 2
    assert b[2][0]['tx_hash'] == more_txs[2]
    assert b[2][1]['tx_hash'] == more_txs[1]

    # now check when supplying account
    b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100)
    assert b[0] == oldest
    assert b[1] == newest
    assert len(b[2]) == 3
    assert b[2][0]['tx_hash'] == more_txs[0]
    assert b[2][1]['tx_hash'] == more_txs[1]
    assert b[2][2]['tx_hash'] == more_txs[2]

    b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100)
    assert b[0] == mid
    assert b[1] == mid
    assert len(b[2]) == 1
    assert b[2][0]['tx_hash'] == more_txs[1]

    b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100)
    assert b[0] == oldest
    assert b[1] == newest
    assert len(b[2]) == 2
    assert b[2][0]['tx_hash'] == more_txs[0]
    assert b[2][1]['tx_hash'] == more_txs[2]

    # add block filter to the mix
    b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
    assert b[0] == mid
    assert b[1] == newest
    assert len(b[2]) == 2
    assert b[2][0]['tx_hash'] == more_txs[0]
    assert b[2][1]['tx_hash'] == more_txs[1]

    b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
    assert b[0] == mid
    assert b[1] == newest
    assert len(b[2]) == 2
    assert b[2][0]['tx_hash'] == more_txs[0]
    assert b[2][1]['tx_hash'] == more_txs[1]

    b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
    assert b[0] == mid
    assert b[1] == mid
    assert len(b[2]) == 1
    assert b[2][0]['tx_hash'] == more_txs[1]

    b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
    assert b[0] == oldest
    assert b[1] == oldest
    assert len(b[2]) == 1
    assert b[2][0]['tx_hash'] == more_txs[2]

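Given how dense the range assertions above are, a short sketch of the shape DataCache.load_transactions_with_data returns may help. Only the b[0]/b[1]/b[2] indexing and the 'tx_hash'/'tx_type' keys are taken from the tests; the helper itself is illustrative.

from cic_cache.cache import DataCache

def tagged_tx_hashes(session, limit=100):
    # mirrors the indexing used in the tests: b[0] lowest block, b[1] highest
    # block, b[2] the list of transaction records
    c = DataCache(session)
    b = c.load_transactions_with_data(0, limit)
    # records carry tx_type == 'unknown' when no tag has been attached
    return [r['tx_hash'] for r in b[2] if r['tx_type'] != 'unknown']
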
230  apps/cic-cache/tests/test_query.py  Normal file
@@ -0,0 +1,230 @@
# standard imports
import os
import logging
import json
import base64
import copy
import re

# external imports
import pytest
from hexathon import strip_0x

# local imports
from cic_cache.runnable.daemons.query import *

logg = logging.getLogger()


@pytest.mark.parametrize(
        'query_path_prefix, query_role, query_address_index, query_offset, query_offset_index, query_limit, query_limit_index, match_re',
        [
            ('/tx/user/', 'alice', 0, None, 3, None, 5, re_transactions_account_bloom),
            ('/tx/user/', 'alice', 0, 42, 3, None, 5, re_transactions_account_bloom),
            ('/tx/user/', 'alice', 0, 42, 3, 13, 5, re_transactions_account_bloom),
            ('/tx/', None, 0, None, 3, None, 5, re_transactions_all_bloom),
            ('/tx/', None, 0, 42, 3, None, 5, re_transactions_all_bloom),
            ('/tx/', None, 0, 42, 3, 13, 5, re_transactions_all_bloom),
            ('/txa/', None, 0, None, 3, None, 5, re_transactions_all_data),
            ('/txa/', None, 0, 42, 3, None, 5, re_transactions_all_data),
            ('/txa/', None, 0, 42, 3, 13, 5, re_transactions_all_data),
            ],
        )
def test_query_regex(
        list_actors,
        query_path_prefix,
        query_role,
        query_address_index,
        query_offset,
        query_offset_index,
        query_limit,
        query_limit_index,
        match_re,
        ):

    paths = []
    path = query_path_prefix
    query_address = None
    if query_role != None:
        query_address = strip_0x(list_actors[query_role])
        paths.append(path + '0x' + query_address)
        paths.append(path + query_address)
    if query_offset != None:
        if query_limit != None:
            for i in range(len(paths)-1):
                paths[i] += '/{}/{}'.format(query_offset, query_limit)
        else:
            for i in range(len(paths)-1):
                paths[i] += '/' + str(query_offset)

    for i in range(len(paths)):
        paths.append(paths[i] + '/')

    for p in paths:
        logg.debug('testing path {} against {}'.format(p, match_re))
        m = re.match(match_re, p)
        l = len(m.groups())
        logg.debug('last index match {} groups {}'.format(m.lastindex, l))
        for i in range(l+1):
            logg.debug('group {} {}'.format(i, m[i]))
        if m.lastindex >= query_offset_index:
            assert query_offset == int(m[query_offset_index + 1])
        if m.lastindex >= query_limit_index:
            assert query_limit == int(m[query_limit_index + 1])
        if query_address_index != None:
            match_address = strip_0x(m[query_address_index + 1])
            assert query_address == match_address


@pytest.mark.parametrize(
        'role_name, query_offset, query_limit, query_match',
        [
            ('alice', None, None, [(420000, 13), (419999, 42)]),
            ('alice', None, 1, [(420000, 13)]),
            ('alice', 1, None, [(419999, 42)]), # 420000 == list_defaults['block']
            ('alice', 2, None, []), # 420000 == list_defaults['block']
            ],
        )
def test_query_process_txs_account(
        init_database,
        list_defaults,
        list_actors,
        list_tokens,
        txs,
        zero_filter,
        role_name,
        query_offset,
        query_limit,
        query_match,
        ):

    actor = None
    try:
        actor = list_actors[role_name]
    except KeyError:
        actor = os.urandom(20).hex()
    path_info = '/tx/user/0x' + strip_0x(actor)
    if query_offset != None:
        path_info += '/' + str(query_offset)
    if query_limit != None:
        if query_offset == None:
            path_info += '/0'
        path_info += '/' + str(query_limit)
    env = {
            'PATH_INFO': path_info,
            }
    logg.debug('using path {}'.format(path_info))
    r = process_transactions_account_bloom(init_database, env)
    assert r != None

    o = json.loads(r[1])
    block_filter_data = base64.b64decode(o['block_filter'].encode('utf-8'))
    zero_filter_data = zero_filter.to_bytes()
    if len(query_match) == 0:
        assert block_filter_data == zero_filter_data
        return

    assert block_filter_data != zero_filter_data
    block_filter = copy.copy(zero_filter)
    block_filter.merge(block_filter_data)
    block_filter_data = block_filter.to_bytes()
    assert block_filter_data != zero_filter_data

    for (block, tx) in query_match:
        block = block.to_bytes(4, byteorder='big')
        assert block_filter.check(block)


@pytest.mark.parametrize(
        'query_offset, query_limit, query_match',
        [
            (None, 2, [(420000, 13), (419999, 42)]),
            (0, 1, [(420000, 13)]),
            (1, 1, [(419999, 42)]),
            (2, 0, []),
            ],
        )
def test_query_process_txs_bloom(
        init_database,
        list_defaults,
        list_actors,
        list_tokens,
        txs,
        zero_filter,
        query_offset,
        query_limit,
        query_match,
        ):

    path_info = '/tx'
    if query_offset != None:
        path_info += '/' + str(query_offset)
    if query_limit != None:
        if query_offset == None:
            path_info += '/0'
        path_info += '/' + str(query_limit)
    env = {
            'PATH_INFO': path_info,
            }
    logg.debug('using path {}'.format(path_info))
    r = process_transactions_all_bloom(init_database, env)
    assert r != None

    o = json.loads(r[1])
    block_filter_data = base64.b64decode(o['block_filter'].encode('utf-8'))
    zero_filter_data = zero_filter.to_bytes()
    if len(query_match) == 0:
        assert block_filter_data == zero_filter_data
        return

    assert block_filter_data != zero_filter_data
    block_filter = copy.copy(zero_filter)
    block_filter.merge(block_filter_data)
    block_filter_data = block_filter.to_bytes()
    assert block_filter_data != zero_filter_data

    for (block, tx) in query_match:
        block = block.to_bytes(4, byteorder='big')
        assert block_filter.check(block)


@pytest.mark.parametrize(
        'query_block_start, query_block_end, query_match_count',
        [
            (None, 42, 0),
            (420000, 420001, 1),
            (419999, 419999, 1), # matches are inclusive
            (419999, 420000, 2),
            (419999, 420001, 2),
            ],
        )
def test_query_process_txs_data(
        init_database,
        list_defaults,
        list_actors,
        list_tokens,
        txs,
        zero_filter,
        query_block_start,
        query_block_end,
        query_match_count,
        ):

    path_info = '/txa'
    if query_block_start != None:
        path_info += '/' + str(query_block_start)
    if query_block_end != None:
        if query_block_start == None:
            path_info += '/0'
        path_info += '/' + str(query_block_end)
    env = {
            'PATH_INFO': path_info,
            'HTTP_X_CIC_CACHE_MODE': 'all',
            }
    logg.debug('using path {}'.format(path_info))
    r = process_transactions_all_data(init_database, env)
    assert r != None

    o = json.loads(r[1])
    assert len(o['data']) == query_match_count
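The handlers exercised above return a JSON body whose 'block_filter' field is a base64-encoded bloom filter over block numbers. A rough sketch of how a consumer might use it follows; the server URL and helper name are hypothetical, and the filter parameters simply mirror the zero_filter fixture (DEFAULT_FILTER_SIZE bits, 3 rounds).

import base64
import json
import urllib.request

import moolb
from cic_cache.cache import DEFAULT_FILTER_SIZE

def block_may_contain_account_tx(cache_url, address, block_height):
    # cache_url and this helper are illustrative; the decoding mirrors the tests
    f = urllib.request.urlopen(cache_url + '/tx/user/0x' + address)
    o = json.loads(f.read())
    f.close()
    flt = moolb.Bloom(DEFAULT_FILTER_SIZE, 3)
    flt.merge(base64.b64decode(o['block_filter']))
    # block numbers are checked as 4-byte big-endian values, as in the tests
    return flt.check(block_height.to_bytes(4, byteorder='big'))
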
2  apps/cic-cache/tests/testdata/config/test.ini  vendored  Normal file
@@ -0,0 +1,2 @@
[foo]
bar_baz = xyzzy
@@ -0,0 +1,53 @@
# standard imports
import logging

# external imports
import celery
from erc20_demurrage_token.demurrage import DemurrageCalculator
from chainlib.connection import RPCConnection
from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS
from cic_eth_registry import CICRegistry

logg = logging.getLogger(__name__)

celery_app = celery.current_app


class NoopCalculator:

    def amount_since(self, amount, timestamp):
        logg.debug('noopcalculator amount {} timestamp {}'.format(amount, timestamp))
        return amount


class DemurrageCalculationTask(celery.Task):

    demurrage_token_calcs = {}

    @classmethod
    def register_token(cls, rpc, chain_spec, token_symbol, sender_address=ZERO_ADDRESS):
        registry = CICRegistry(chain_spec, rpc)
        token_address = registry.by_name(token_symbol, sender_address=sender_address)
        try:
            c = DemurrageCalculator.from_contract(rpc, chain_spec, token_address, sender_address=sender_address)
            logg.info('found demurrage calculator for ERC20 {} @ {}'.format(token_symbol, token_address))
        except:
            logg.warning('Token {} at address {} does not appear to be a demurrage contract. Calls to balance adjust for this token will always return the same amount'.format(token_symbol, token_address))
            c = NoopCalculator()

        cls.demurrage_token_calcs[token_symbol] = c


@celery_app.task(bind=True, base=DemurrageCalculationTask)
def get_adjusted_balance(self, token_symbol, amount, timestamp):
    c = self.demurrage_token_calcs[token_symbol]
    return c.amount_since(amount, timestamp)


def aux_setup(rpc, config, sender_address=ZERO_ADDRESS):
    chain_spec_str = config.get('CHAIN_SPEC')
    chain_spec = ChainSpec.from_chain_str(chain_spec_str)
    token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')

    DemurrageCalculationTask.register_token(rpc, chain_spec, token_symbol, sender_address=sender_address)
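A minimal sketch of how a worker process might wire this module in before starting its celery tasks, assuming an RPC endpoint has already been registered with chainlib's RPCConnection and a config store exposing CHAIN_SPEC and CIC_DEFAULT_TOKEN_SYMBOL (the same keys aux_setup reads above); the wrapper function name is illustrative.

from chainlib.chain import ChainSpec
from chainlib.connection import RPCConnection

from cic_eth_aux.erc20_demurrage_token import aux_setup

def setup_demurrage_tasks(config):
    # config is assumed to behave like the store used in the tests below
    chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
    # assumes an RPC provider was previously registered for the 'default' tag
    rpc = RPCConnection.connect(chain_spec, 'default')
    aux_setup(rpc, config)
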
@@ -0,0 +1,30 @@
# standard imports
import logging

# external imports
import celery
from cic_eth.api.base import ApiBase

app = celery.current_app

logg = logging.getLogger(__name__)


class Api(ApiBase):

    def get_adjusted_balance(self, token_symbol, balance, timestamp):
        s = celery.signature(
                'cic_eth_aux.erc20_demurrage_token.get_adjusted_balance',
                [
                    token_symbol,
                    balance,
                    timestamp,
                ],
                queue=None,
                )
        if self.callback_param != None:
            s.link(self.callback_success)
            s.on_error(self.callback_error)

        t = s.apply_async(queue=self.queue)
        return t
5  apps/cic-eth-aux/erc20-demurrage-token/requirements.txt  Normal file
@@ -0,0 +1,5 @@
celery==4.4.7
erc20-demurrage-token~=0.0.5a3
cic-eth-registry~=0.6.1a6
chainlib~=0.0.9rc1
cic_eth~=0.12.4a11
@@ -1,14 +1,15 @@
[metadata]
name = cic-base
version = attr: cic_base.version.__version_string__
description = CIC python base
name = cic-eth-aux-erc20-demurrage-token
version = 0.0.2a7
description = cic-eth tasks supporting erc20 demurrage token
author = Louis Holbrook
author_email = dev@holbrook.no
url = https://gitlab.com/grassrootseconomics/cic-eth
url = https://gitlab.com/ccicnet/erc20-demurrage-token
keywords =
    cic
    cryptocurrency
    ethereum
    blockchain
    cryptocurrency
    erc20
classifiers =
    Programming Language :: Python :: 3
    Operating System :: OS Independent
@@ -17,19 +18,13 @@ classifiers =
    Intended Audience :: Developers
    License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
    Topic :: Internet
    # Topic :: Blockchain :: EVM
    #Topic :: Blockchain :: EVM
license = GPL3
licence_files =
    LICENSE.txt
    LICENSE

[options]
include_package_data = True
python_requires = >= 3.6
packages =
    cic_base


[options.entry_points]
console_scripts =
    cic-base-audit = cic_base.runnable.audit:main
    cic-base-merge = cic_base.runnable.merge:main
    cic-base-update = cic_base.runnable.update:main
packages =
    cic_eth_aux.erc20_demurrage_token
25  apps/cic-eth-aux/erc20-demurrage-token/setup.py  Normal file
@@ -0,0 +1,25 @@
from setuptools import setup

requirements = []
f = open('requirements.txt', 'r')
while True:
    l = f.readline()
    if l == '':
        break
    requirements.append(l.rstrip())
f.close()

test_requirements = []
f = open('test_requirements.txt', 'r')
while True:
    l = f.readline()
    if l == '':
        break
    test_requirements.append(l.rstrip())
f.close()


setup(
        install_requires=requirements,
        tests_require=test_requirements,
        )
11  apps/cic-eth-aux/erc20-demurrage-token/test_requirements.txt  Normal file
@@ -0,0 +1,11 @@
pytest==6.0.1
pytest-celery==0.0.0a1
pytest-mock==3.3.1
pytest-cov==2.10.1
eth-tester==0.5.0b3
py-evm==0.3.0a20
SQLAlchemy==1.3.20
liveness~=0.0.1a7
eth-accounts-index==0.1.1a1
eth-contract-registry==0.5.8a1
eth-address-index==0.2.1a1
88  apps/cic-eth-aux/erc20-demurrage-token/tests/conftest.py  Normal file
@@ -0,0 +1,88 @@
# external imports
import celery
from chainlib.eth.pytest.fixtures_chain import *
from chainlib.eth.pytest.fixtures_ethtester import *
from cic_eth_registry.pytest.fixtures_contracts import *
from cic_eth_registry.pytest.fixtures_tokens import *
from erc20_demurrage_token.unittest.base import TestTokenDeploy
from erc20_demurrage_token.token import DemurrageToken
from eth_token_index.index import TokenUniqueSymbolIndex
from eth_address_declarator.declarator import AddressDeclarator

# cic-eth imports
from cic_eth.pytest.fixtures_celery import *
from cic_eth.pytest.fixtures_token import *
from cic_eth.pytest.fixtures_config import *


@pytest.fixture(scope='function')
def demurrage_token(
        default_chain_spec,
        eth_rpc,
        token_registry,
        contract_roles,
        eth_signer,
        ):
    d = TestTokenDeploy(eth_rpc, token_symbol='BAR', token_name='Bar Token')
    nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
    c = DemurrageToken(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
    token_address = d.deploy(eth_rpc, contract_roles['CONTRACT_DEPLOYER'], c, 'SingleNocap')
    logg.debug('demurrage token contract "BAR" deployed to {}'.format(token_address))

    return token_address


@pytest.fixture(scope='function')
def demurrage_token_symbol(
        default_chain_spec,
        eth_rpc,
        demurrage_token,
        contract_roles,
        ):

    c = DemurrageToken(default_chain_spec)
    o = c.symbol(demurrage_token, sender_address=contract_roles['CONTRACT_DEPLOYER'])
    r = eth_rpc.do(o)
    return c.parse_symbol(r)


@pytest.fixture(scope='function')
def demurrage_token_declaration(
        foo_token_declaration,
        ):
    return foo_token_declaration


@pytest.fixture(scope='function')
def register_demurrage_token(
        default_chain_spec,
        token_registry,
        eth_rpc,
        eth_signer,
        register_lookups,
        contract_roles,
        demurrage_token_declaration,
        demurrage_token,
        address_declarator,
        ):

    nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc)

    c = TokenUniqueSymbolIndex(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
    (tx_hash_hex, o) = c.register(token_registry, contract_roles['CONTRACT_DEPLOYER'], demurrage_token)
    eth_rpc.do(o)
    o = receipt(tx_hash_hex)
    r = eth_rpc.do(o)
    assert r['status'] == 1

    nonce_oracle = RPCNonceOracle(contract_roles['TRUSTED_DECLARATOR'], eth_rpc)
    c = AddressDeclarator(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
    (tx_hash_hex, o) = c.add_declaration(address_declarator, contract_roles['TRUSTED_DECLARATOR'], demurrage_token, demurrage_token_declaration)

    eth_rpc.do(o)
    o = receipt(tx_hash_hex)
    r = eth_rpc.do(o)
    assert r['status'] == 1

    return token_registry

@@ -0,0 +1,69 @@
# standard imports
import logging
import copy
import datetime

# external imports
import celery

# cic-eth imports
from cic_eth_aux.erc20_demurrage_token import (
        DemurrageCalculationTask,
        aux_setup,
        )
from cic_eth_aux.erc20_demurrage_token.api import Api as AuxApi

logg = logging.getLogger()


def test_demurrage_calulate_task(
        default_chain_spec,
        eth_rpc,
        cic_registry,
        celery_session_worker,
        register_demurrage_token,
        demurrage_token_symbol,
        contract_roles,
        load_config,
        ):

    config = copy.copy(load_config)
    config.add(str(default_chain_spec), 'CIC_CHAIN_SPEC', exists_ok=True)
    config.add(demurrage_token_symbol, 'CIC_DEFAULT_TOKEN_SYMBOL', exists_ok=True)
    aux_setup(eth_rpc, load_config, sender_address=contract_roles['CONTRACT_DEPLOYER'])

    since = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
    s = celery.signature(
            'cic_eth_aux.erc20_demurrage_token.get_adjusted_balance',
            [
                demurrage_token_symbol,
                1000,
                since.timestamp(),
            ],
            queue=None,
            )
    t = s.apply_async()
    r = t.get_leaf()
    assert t.successful()
    assert r == 980


def test_demurrage_calculate_api(
        default_chain_spec,
        eth_rpc,
        cic_registry,
        celery_session_worker,
        register_demurrage_token,
        demurrage_token_symbol,
        contract_roles,
        load_config,
        ):

    api = AuxApi(str(default_chain_spec), queue=None)
    since = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
    t = api.get_adjusted_balance(demurrage_token_symbol, 1000, since.timestamp())
    r = t.get_leaf()
    assert t.successful()
    assert r == 980

@@ -6,4 +6,5 @@ omit =
    cic_eth/sync/head.py
    cic_eth/sync/mempool.py
    cic_eth/queue/state.py
    cic_eth/cli
    *redis*.py

6  apps/cic-eth/.dockerignore  Normal file
@@ -0,0 +1,6 @@
.git
.cache
.dot
**/doc
**/.venv
**/venv
Some files were not shown because too many files have changed in this diff.