Compare commits

Comparing lash/verif ... lash/funga (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | f7a3eac00f |  |
.gitlab-ci.yml

@@ -10,7 +10,6 @@ include:
  #- local: 'apps/data-seeding/.gitlab-ci.yml'

stages:
  - version
  - build
  - test
  - deploy

@@ -21,39 +20,9 @@ variables:
  DOCKER_BUILDKIT: "1"
  COMPOSE_DOCKER_CLI_BUILD: "1"
  CI_DEBUG_TRACE: "true"
  SEMVERBOT_VERSION: "0.2.0"

#before_script:
#  - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY

version:
  #image: python:3.7-stretch
  image: registry.gitlab.com/grassrootseconomics/cic-base-images/ci-version:b01318ae
  stage: version
  script:
    - mkdir -p ~/.ssh && chmod 700 ~/.ssh
    - ssh-keyscan gitlab.com >> ~/.ssh/known_hosts && chmod 644 ~/.ssh/known_hosts
    - eval $(ssh-agent -s)
    - ssh-add <(echo "$SSH_PRIVATE_KEY")
    - git remote set-url origin git@gitlab.com:grassrootseconomics/cic-internal-integration.git
    - export TAG=$(sbot predict version -m auto)
    - |
      if [[ -z $TAG ]]
      then
        echo "tag could not be set $@"
        exit 1
      fi
    - echo $TAG > version
    - git tag -a v$TAG -m "ci tagged"
    - git push origin v$TAG
  artifacts:
    paths:
      - version
  rules:
    - if: $CI_COMMIT_REF_PROTECTED == "true"
      when: always
    - if: $CI_COMMIT_REF_NAME == "master"
      when: always
  before_script:
    - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY

# runs on protected branches and pushes to repo
build-push:

@@ -61,17 +30,12 @@ build-push:
  tags:
    - integration
  #script:
  before_script:
    - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
  # - TAG=$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA sh ./scripts/build-push.sh
  script:
    - TAG=latest ./scripts/build-push.sh
    - TAG=$(cat ./version) ./scripts/build-push.sh
    - TAG=latest sh ./scripts/build-push.sh
  rules:
    - if: $CI_COMMIT_REF_PROTECTED == "true"
      when: always
    - if: $CI_COMMIT_REF_NAME == "master"
      when: always

deploy-dev:
  stage: deploy
semverbot configuration file:

@@ -1,16 +0,0 @@
[git]

[git.config]
email = "semverbot@grassroots.org"
name = "semvervot"

[git.tags]
prefix = "v"

[semver]
mode = "git-commit"

[semver.detection]
patch = ["fix", "[fix]", "patch", "[patch]"]
minor = ["minor", "[minor]", "feat", "[feat]", "release", "[release]", "bump", "[bump]"]
major = ["BREAKING CHANGE"]
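The CI `version` job shown earlier drives this configuration through `sbot predict version -m auto`: semverbot scans commit messages for the keywords listed under `[semver.detection]` and picks the corresponding bump. A rough sketch of that behaviour, with hypothetical commit messages; the `sbot` and `git tag` lines mirror the CI script above:

```
# Hypothetical commit messages and the bump the detection rules above would select
git commit -m "fix: guard against empty TAG"       # "fix"             -> patch
git commit -m "feat: split build-push stages"      # "feat"            -> minor
git commit -m "BREAKING CHANGE: new registry API"  # "BREAKING CHANGE" -> major

# As in the version job above: ask semverbot for the next version and tag it
export TAG=$(sbot predict version -m auto)
echo $TAG > version
git tag -a v$TAG -m "ci tagged"
git push origin v$TAG
```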
@@ -4,7 +4,6 @@

This repo uses docker-compose and docker buildkit. Set the following environment variables to get started:

```
export COMPOSE_DOCKER_CLI_BUILD=1
export DOCKER_BUILDKIT=1
```
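A minimal sketch of how those two variables are used when building locally; the `docker-compose build` invocation and service layout are assumptions, not part of this changeset:

```
# Enable BuildKit for the docker CLI and for docker-compose, as the README instructs
export COMPOSE_DOCKER_CLI_BUILD=1
export DOCKER_BUILDKIT=1

# Hypothetical local build of the repo's compose services
docker-compose build
```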
apps/cic-base-os/aux/wait-for-it/.gitignore (vendored, 3 lines)
@@ -1,3 +0,0 @@
|
||||
**/*.pyc
|
||||
.pydevproject
|
||||
/vendor/
|
||||
@@ -1,7 +0,0 @@
|
||||
language: python
|
||||
python:
|
||||
- "2.7"
|
||||
|
||||
script:
|
||||
- python test/wait-for-it.py
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2016 Giles Hall
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -1,75 +0,0 @@
|
||||
# wait-for-it
|
||||
|
||||
`wait-for-it.sh` is a pure bash script that will wait on the availability of a
|
||||
host and TCP port. It is useful for synchronizing the spin-up of
|
||||
interdependent services, such as linked docker containers. Since it is a pure
|
||||
bash script, it does not have any external dependencies.
|
||||
|
||||
## Usage
|
||||
|
||||
```text
|
||||
wait-for-it.sh host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
For example, let's test to see if we can access port 80 on `www.google.com`,
|
||||
and if it is available, echo the message `google is up`.
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:80 -- echo "google is up"
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:80
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
google is up
|
||||
```
|
||||
|
||||
You can set your own timeout with the `-t` or `--timeout=` option. Setting
|
||||
the timeout value to 0 will disable the timeout:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh -t 0 www.google.com:80 -- echo "google is up"
|
||||
wait-for-it.sh: waiting for www.google.com:80 without a timeout
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
google is up
|
||||
```
|
||||
|
||||
The subcommand will be executed regardless if the service is up or not. If you
|
||||
wish to execute the subcommand only if the service is up, add the `--strict`
|
||||
argument. In this example, we will test port 81 on `www.google.com` which will
|
||||
fail:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:81 --timeout=1 --strict -- echo "google is up"
|
||||
wait-for-it.sh: waiting 1 seconds for www.google.com:81
|
||||
wait-for-it.sh: timeout occurred after waiting 1 seconds for www.google.com:81
|
||||
wait-for-it.sh: strict mode, refusing to execute subprocess
|
||||
```
|
||||
|
||||
If you don't want to execute a subcommand, leave off the `--` argument. This
|
||||
way, you can test the exit condition of `wait-for-it.sh` in your own scripts,
|
||||
and determine how to proceed:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:80
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:80
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
$ echo $?
|
||||
0
|
||||
$ ./wait-for-it.sh www.google.com:81
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:81
|
||||
wait-for-it.sh: timeout occurred after waiting 15 seconds for www.google.com:81
|
||||
$ echo $?
|
||||
124
|
||||
```
|
||||
|
||||
## Community
|
||||
|
||||
*Debian*: There is a [Debian package](https://tracker.debian.org/pkg/wait-for-it).
|
||||
@@ -1,182 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# Use this script to test if a given TCP host/port are available
|
||||
|
||||
WAITFORIT_cmdname=${0##*/}
|
||||
|
||||
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
|
||||
|
||||
usage()
|
||||
{
|
||||
cat << USAGE >&2
|
||||
Usage:
|
||||
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
USAGE
|
||||
exit 1
|
||||
}
|
||||
|
||||
wait_for()
|
||||
{
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
else
|
||||
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
|
||||
fi
|
||||
WAITFORIT_start_ts=$(date +%s)
|
||||
while :
|
||||
do
|
||||
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
|
||||
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
|
||||
WAITFORIT_result=$?
|
||||
else
|
||||
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
|
||||
WAITFORIT_result=$?
|
||||
fi
|
||||
if [[ $WAITFORIT_result -eq 0 ]]; then
|
||||
WAITFORIT_end_ts=$(date +%s)
|
||||
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
return $WAITFORIT_result
|
||||
}
|
||||
|
||||
wait_for_wrapper()
|
||||
{
|
||||
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
|
||||
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
else
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
fi
|
||||
WAITFORIT_PID=$!
|
||||
trap "kill -INT -$WAITFORIT_PID" INT
|
||||
wait $WAITFORIT_PID
|
||||
WAITFORIT_RESULT=$?
|
||||
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
fi
|
||||
return $WAITFORIT_RESULT
|
||||
}
|
||||
|
||||
# process arguments
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case "$1" in
|
||||
*:* )
|
||||
WAITFORIT_hostport=(${1//:/ })
|
||||
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
|
||||
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
|
||||
shift 1
|
||||
;;
|
||||
--child)
|
||||
WAITFORIT_CHILD=1
|
||||
shift 1
|
||||
;;
|
||||
-q | --quiet)
|
||||
WAITFORIT_QUIET=1
|
||||
shift 1
|
||||
;;
|
||||
-s | --strict)
|
||||
WAITFORIT_STRICT=1
|
||||
shift 1
|
||||
;;
|
||||
-h)
|
||||
WAITFORIT_HOST="$2"
|
||||
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--host=*)
|
||||
WAITFORIT_HOST="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-p)
|
||||
WAITFORIT_PORT="$2"
|
||||
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--port=*)
|
||||
WAITFORIT_PORT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-t)
|
||||
WAITFORIT_TIMEOUT="$2"
|
||||
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--timeout=*)
|
||||
WAITFORIT_TIMEOUT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
WAITFORIT_CLI=("$@")
|
||||
break
|
||||
;;
|
||||
--help)
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
echoerr "Unknown argument: $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
|
||||
echoerr "Error: you need to provide a host and port to test."
|
||||
usage
|
||||
fi
|
||||
|
||||
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
|
||||
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
|
||||
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
|
||||
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
|
||||
|
||||
# Check to see if timeout is from busybox?
|
||||
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
|
||||
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
|
||||
|
||||
WAITFORIT_BUSYTIMEFLAG=""
|
||||
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
|
||||
WAITFORIT_ISBUSY=1
|
||||
# Check if busybox timeout uses -t flag
|
||||
# (recent Alpine versions don't support -t anymore)
|
||||
if timeout &>/dev/stdout | grep -q -e '-t '; then
|
||||
WAITFORIT_BUSYTIMEFLAG="-t"
|
||||
fi
|
||||
else
|
||||
WAITFORIT_ISBUSY=0
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
exit $WAITFORIT_RESULT
|
||||
else
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
wait_for_wrapper
|
||||
WAITFORIT_RESULT=$?
|
||||
else
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CLI != "" ]]; then
|
||||
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
exec "${WAITFORIT_CLI[@]}"
|
||||
else
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
apps/cic-cache/aux/wait-for-it/.gitignore (vendored, 3 lines)
@@ -1,3 +0,0 @@
|
||||
**/*.pyc
|
||||
.pydevproject
|
||||
/vendor/
|
||||
@@ -1,7 +0,0 @@
|
||||
language: python
|
||||
python:
|
||||
- "2.7"
|
||||
|
||||
script:
|
||||
- python test/wait-for-it.py
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2016 Giles Hall
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -1,75 +0,0 @@
|
||||
# wait-for-it
|
||||
|
||||
`wait-for-it.sh` is a pure bash script that will wait on the availability of a
|
||||
host and TCP port. It is useful for synchronizing the spin-up of
|
||||
interdependent services, such as linked docker containers. Since it is a pure
|
||||
bash script, it does not have any external dependencies.
|
||||
|
||||
## Usage
|
||||
|
||||
```text
|
||||
wait-for-it.sh host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
For example, let's test to see if we can access port 80 on `www.google.com`,
|
||||
and if it is available, echo the message `google is up`.
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:80 -- echo "google is up"
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:80
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
google is up
|
||||
```
|
||||
|
||||
You can set your own timeout with the `-t` or `--timeout=` option. Setting
|
||||
the timeout value to 0 will disable the timeout:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh -t 0 www.google.com:80 -- echo "google is up"
|
||||
wait-for-it.sh: waiting for www.google.com:80 without a timeout
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
google is up
|
||||
```
|
||||
|
||||
The subcommand will be executed regardless if the service is up or not. If you
|
||||
wish to execute the subcommand only if the service is up, add the `--strict`
|
||||
argument. In this example, we will test port 81 on `www.google.com` which will
|
||||
fail:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:81 --timeout=1 --strict -- echo "google is up"
|
||||
wait-for-it.sh: waiting 1 seconds for www.google.com:81
|
||||
wait-for-it.sh: timeout occurred after waiting 1 seconds for www.google.com:81
|
||||
wait-for-it.sh: strict mode, refusing to execute subprocess
|
||||
```
|
||||
|
||||
If you don't want to execute a subcommand, leave off the `--` argument. This
|
||||
way, you can test the exit condition of `wait-for-it.sh` in your own scripts,
|
||||
and determine how to proceed:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:80
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:80
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
$ echo $?
|
||||
0
|
||||
$ ./wait-for-it.sh www.google.com:81
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:81
|
||||
wait-for-it.sh: timeout occurred after waiting 15 seconds for www.google.com:81
|
||||
$ echo $?
|
||||
124
|
||||
```
|
||||
|
||||
## Community
|
||||
|
||||
*Debian*: There is a [Debian package](https://tracker.debian.org/pkg/wait-for-it).
|
||||
@@ -1,182 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# Use this script to test if a given TCP host/port are available
|
||||
|
||||
WAITFORIT_cmdname=${0##*/}
|
||||
|
||||
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
|
||||
|
||||
usage()
|
||||
{
|
||||
cat << USAGE >&2
|
||||
Usage:
|
||||
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
USAGE
|
||||
exit 1
|
||||
}
|
||||
|
||||
wait_for()
|
||||
{
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
else
|
||||
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
|
||||
fi
|
||||
WAITFORIT_start_ts=$(date +%s)
|
||||
while :
|
||||
do
|
||||
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
|
||||
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
|
||||
WAITFORIT_result=$?
|
||||
else
|
||||
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
|
||||
WAITFORIT_result=$?
|
||||
fi
|
||||
if [[ $WAITFORIT_result -eq 0 ]]; then
|
||||
WAITFORIT_end_ts=$(date +%s)
|
||||
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
return $WAITFORIT_result
|
||||
}
|
||||
|
||||
wait_for_wrapper()
|
||||
{
|
||||
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
|
||||
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
else
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
fi
|
||||
WAITFORIT_PID=$!
|
||||
trap "kill -INT -$WAITFORIT_PID" INT
|
||||
wait $WAITFORIT_PID
|
||||
WAITFORIT_RESULT=$?
|
||||
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
fi
|
||||
return $WAITFORIT_RESULT
|
||||
}
|
||||
|
||||
# process arguments
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case "$1" in
|
||||
*:* )
|
||||
WAITFORIT_hostport=(${1//:/ })
|
||||
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
|
||||
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
|
||||
shift 1
|
||||
;;
|
||||
--child)
|
||||
WAITFORIT_CHILD=1
|
||||
shift 1
|
||||
;;
|
||||
-q | --quiet)
|
||||
WAITFORIT_QUIET=1
|
||||
shift 1
|
||||
;;
|
||||
-s | --strict)
|
||||
WAITFORIT_STRICT=1
|
||||
shift 1
|
||||
;;
|
||||
-h)
|
||||
WAITFORIT_HOST="$2"
|
||||
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--host=*)
|
||||
WAITFORIT_HOST="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-p)
|
||||
WAITFORIT_PORT="$2"
|
||||
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--port=*)
|
||||
WAITFORIT_PORT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-t)
|
||||
WAITFORIT_TIMEOUT="$2"
|
||||
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--timeout=*)
|
||||
WAITFORIT_TIMEOUT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
WAITFORIT_CLI=("$@")
|
||||
break
|
||||
;;
|
||||
--help)
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
echoerr "Unknown argument: $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
|
||||
echoerr "Error: you need to provide a host and port to test."
|
||||
usage
|
||||
fi
|
||||
|
||||
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
|
||||
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
|
||||
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
|
||||
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
|
||||
|
||||
# Check to see if timeout is from busybox?
|
||||
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
|
||||
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
|
||||
|
||||
WAITFORIT_BUSYTIMEFLAG=""
|
||||
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
|
||||
WAITFORIT_ISBUSY=1
|
||||
# Check if busybox timeout uses -t flag
|
||||
# (recent Alpine versions don't support -t anymore)
|
||||
if timeout &>/dev/stdout | grep -q -e '-t '; then
|
||||
WAITFORIT_BUSYTIMEFLAG="-t"
|
||||
fi
|
||||
else
|
||||
WAITFORIT_ISBUSY=0
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
exit $WAITFORIT_RESULT
|
||||
else
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
wait_for_wrapper
|
||||
WAITFORIT_RESULT=$?
|
||||
else
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CLI != "" ]]; then
|
||||
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
exec "${WAITFORIT_CLI[@]}"
|
||||
else
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
@@ -1,4 +1,4 @@
[cic]
registry_address =
trust_address =
health_modules =
health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas
@@ -5,10 +5,7 @@ import re
import base64

# external imports
from hexathon import (
    add_0x,
    strip_0x,
)
from hexathon import add_0x

# local imports
from cic_cache.cache import (
@@ -22,7 +19,6 @@ logg = logging.getLogger(__name__)
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
re_transactions_all_data = r'/txa/(\d+)?/?(\d+)/?'
re_transactions_account_data = r'/txa/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'

DEFAULT_LIMIT = 100
@@ -32,7 +28,9 @@ def process_transactions_account_bloom(session, env):
    if not r:
        return None

    address = strip_0x(r[1])
    address = r[1]
    if r[2] == None:
        address = add_0x(address)
    offset = 0
    if r.lastindex > 2:
        offset = r[4]
@@ -115,38 +113,3 @@ def process_transactions_all_data(session, env):
|
||||
j = json.dumps(o)
|
||||
|
||||
return ('application/json', j.encode('utf-8'),)
|
||||
|
||||
|
||||
def process_transactions_account_data(session, env):
|
||||
r = re.match(re_transactions_account_data, env.get('PATH_INFO'))
|
||||
if not r:
|
||||
return None
|
||||
if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
|
||||
return None
|
||||
|
||||
logg.debug('got data request {}'.format(env))
|
||||
address = strip_0x(r[1])
|
||||
#if r[2] == None:
|
||||
# address = add_0x(address)
|
||||
offset = 0
|
||||
if r.lastindex > 2:
|
||||
offset = r[4]
|
||||
limit = DEFAULT_LIMIT
|
||||
if r.lastindex > 4:
|
||||
limit = r[6]
|
||||
|
||||
c = DataCache(session)
|
||||
(lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data(address, offset, limit)
|
||||
|
||||
for r in tx_cache:
|
||||
r['date_block'] = r['date_block'].timestamp()
|
||||
|
||||
o = {
|
||||
'low': lowest_block,
|
||||
'high': highest_block,
|
||||
'data': tx_cache,
|
||||
}
|
||||
|
||||
j = json.dumps(o)
|
||||
|
||||
return ('application/json', j.encode('utf-8'),)
|
||||
|
||||
@@ -8,31 +8,41 @@ import base64
|
||||
import confini
|
||||
|
||||
# local imports
|
||||
import cic_cache.cli
|
||||
from cic_cache.db import dsn_from_config
|
||||
from cic_cache.db.models.base import SessionBase
|
||||
from cic_cache.runnable.daemons.query import (
|
||||
process_transactions_account_bloom,
|
||||
process_transactions_account_data,
|
||||
process_transactions_all_bloom,
|
||||
process_transactions_all_data,
|
||||
)
|
||||
import cic_cache.cli
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
|
||||
migrationsdir = os.path.join(dbdir, 'migrations')
|
||||
|
||||
arg_flags = cic_cache.cli.argflag_std_read
|
||||
local_arg_flags = cic_cache.cli.argflag_local_sync | cic_cache.cli.argflag_local_task
|
||||
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
config_dir = os.path.join('/usr/local/etc/cic-cache')
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||
args = argparser.parse_args()
|
||||
|
||||
# process config
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
config = confini.Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config:\n{}'.format(config))
|
||||
|
||||
# connect to database
|
||||
dsn = dsn_from_config(config)
|
||||
SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
|
||||
|
||||
@@ -48,7 +58,6 @@ def application(env, start_response):
|
||||
process_transactions_all_data,
|
||||
process_transactions_all_bloom,
|
||||
process_transactions_account_bloom,
|
||||
process_transactions_account_data,
|
||||
]:
|
||||
r = None
|
||||
try:
|
||||
|
||||
@@ -9,7 +9,6 @@ import celery
|
||||
import confini
|
||||
|
||||
# local imports
|
||||
import cic_cache.cli
|
||||
from cic_cache.db import dsn_from_config
|
||||
from cic_cache.db.models.base import SessionBase
|
||||
from cic_cache.tasks.tx import *
|
||||
@@ -17,20 +16,35 @@ from cic_cache.tasks.tx import *
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
# process args
|
||||
arg_flags = cic_cache.cli.argflag_std_base
|
||||
local_arg_flags = cic_cache.cli.argflag_local_task
|
||||
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
config_dir = os.path.join('/usr/local/etc/cic-cache')
|
||||
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||
argparser.add_argument('-q', type=str, default='cic-cache', help='queue name for worker tasks')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||
|
||||
args = argparser.parse_args()
|
||||
|
||||
# process config
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
config = confini.Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
|
||||
# connect to database
|
||||
dsn = dsn_from_config(config)
|
||||
SessionBase.connect(dsn)
|
||||
|
||||
# verify database connection with minimal sanity query
|
||||
#session = SessionBase.create_session()
|
||||
#session.execute('select version_num from alembic_version')
|
||||
#session.close()
|
||||
|
||||
# set up celery
|
||||
current_app = celery.Celery(__name__)
|
||||
|
||||
@@ -73,9 +87,9 @@ def main():
|
||||
elif args.v:
|
||||
argv.append('--loglevel=INFO')
|
||||
argv.append('-Q')
|
||||
argv.append(config.get('CELERY_QUEUE'))
|
||||
argv.append(args.q)
|
||||
argv.append('-n')
|
||||
argv.append(config.get('CELERY_QUEUE'))
|
||||
argv.append(args.q)
|
||||
|
||||
current_app.worker_main(argv)
|
||||
|
||||
|
||||
@@ -40,7 +40,7 @@ logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
# process args
|
||||
arg_flags = cic_cache.cli.argflag_std_base
|
||||
arg_flags = cic_cache.cli.argflag_std_read
|
||||
local_arg_flags = cic_cache.cli.argflag_local_sync
|
||||
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
|
||||
apps/cic-cache/config/celery.ini (new file, 3 lines)
@@ -0,0 +1,3 @@
[celery]
broker_url = redis:///
result_url = redis:///

apps/cic-cache/config/cic.ini (new file, 3 lines)
@@ -0,0 +1,3 @@
[cic]
registry_address =
trust_address =

apps/cic-cache/config/database.ini (new file, 9 lines)
@@ -0,0 +1,9 @@
[database]
NAME=cic_cache
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0

apps/cic-cache/config/docker/celery.ini (new file, 3 lines)
@@ -0,0 +1,3 @@
[celery]
broker_url = redis://localhost:63379
result_url = redis://localhost:63379

apps/cic-cache/config/docker/cic.ini (new file, 3 lines)
@@ -0,0 +1,3 @@
[cic]
registry_address =
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C

apps/cic-cache/config/docker/database.ini (new file, 9 lines)
@@ -0,0 +1,9 @@
[database]
NAME=cic_cache
USER=grassroots
PASSWORD=
HOST=localhost
PORT=63432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0

apps/cic-cache/config/docker/syncer.ini (new file, 4 lines)
@@ -0,0 +1,4 @@
[syncer]
loop_interval = 1
offset = 0
no_history = 0
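These new ini files hold the configurable parameters for cic-cache; the Dockerfile changes later in this diff note that every parameter can also be overridden with an environment variable, and the runnable scripts expose an `--env-prefix` option backed by `CONFINI_ENV_PREFIX`. A sketch of that override pattern, assuming confini's usual PREFIX_SECTION_KEY naming; the prefix value and hostname below are hypothetical:

```
# Override [database] values from database.ini at runtime (hypothetical prefix and host)
export CONFINI_ENV_PREFIX=CIC_CACHE
export CIC_CACHE_DATABASE_HOST=postgres
export CIC_CACHE_DATABASE_PORT=5432

# List the environment variables a config directory exposes
# (confini-dump is referenced in the Dockerfile comments in this changeset)
confini-dump -z /usr/local/etc/cic-cache
```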
@@ -1,25 +1,32 @@
|
||||
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
|
||||
|
||||
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
|
||||
# syntax = docker/dockerfile:1.2
|
||||
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||
|
||||
# RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
|
||||
|
||||
COPY requirements.txt .
|
||||
#RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
|
||||
#RUN pip install $pip_extra_index_url_flag .
|
||||
#RUN pip install .[server]
|
||||
|
||||
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||
ARG EXTRA_PIP_ARGS=""
|
||||
ARG PIP_INDEX_URL="https://pypi.org/simple"
|
||||
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
pip install --index-url $PIP_INDEX_URL \
|
||||
--pre \
|
||||
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||
pip install --index-url https://pypi.org/simple \
|
||||
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
|
||||
-r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN python setup.py install
|
||||
|
||||
# ini files in config directory defines the configurable parameters for the application
|
||||
# they can all be overridden by environment variables
|
||||
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||
COPY config/ /usr/local/etc/cic-cache/
|
||||
|
||||
# for db migrations
|
||||
COPY ./aux/wait-for-it/wait-for-it.sh ./
|
||||
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
|
||||
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
|
||||
|
||||
COPY /docker/start_tracker.sh ./start_tracker.sh
|
||||
|
||||
@@ -2,7 +2,7 @@ alembic==1.4.2
confini>=0.3.6rc4,<0.5.0
uwsgi==2.0.19.1
moolb~=0.1.1b2
cic-eth-registry~=0.6.1a5
cic-eth-registry~=0.6.1a1
SQLAlchemy==1.3.20
semver==2.13.0
psycopg2==2.8.6

@@ -12,4 +12,3 @@ chainsyncer[sql]>=0.0.6a3,<0.1.0
erc20-faucet>=0.3.2a2, <0.4.0
chainlib-eth>=0.0.9a14,<0.1.0
eth-address-index>=0.2.3a4,<0.3.0
okota>=0.2.4a6,<0.3.0
@@ -1,19 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
# standard imports
|
||||
#!/usr/bin/python
|
||||
import os
|
||||
import argparse
|
||||
import logging
|
||||
import re
|
||||
|
||||
# external imports
|
||||
import alembic
|
||||
from alembic.config import Config as AlembicConfig
|
||||
import confini
|
||||
|
||||
# local imports
|
||||
from cic_cache.db import dsn_from_config
|
||||
import cic_cache.cli
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
@@ -22,29 +17,31 @@ logg = logging.getLogger()
|
||||
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
|
||||
migrationsdir = os.path.join(dbdir, 'migrations')
|
||||
configdir = os.path.join(rootdir, 'cic_cache', 'data', 'config')
|
||||
|
||||
#config_dir = os.path.join('/usr/local/etc/cic-cache')
|
||||
config_dir = os.path.join('/usr/local/etc/cic-cache')
|
||||
|
||||
arg_flags = cic_cache.cli.argflag_std_base
|
||||
local_arg_flags = cic_cache.cli.argflag_local_sync
|
||||
argparser = cic_cache.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
|
||||
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
|
||||
argparser.add_argument('-f', '--force', action='store_true', help='force action')
|
||||
argparser.add_argument('--migrations-dir', dest='migrations_dir', type=str, help='migrations directory')
|
||||
argparser.add_argument('-f', action='store_true', help='force action')
|
||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||
args = argparser.parse_args()
|
||||
|
||||
extra_args = {
|
||||
'reset': None,
|
||||
'force': None,
|
||||
'migrations_dir': None,
|
||||
}
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
# process config
|
||||
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)
|
||||
config = confini.Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config:\n{}'.format(config))
|
||||
|
||||
migrations_dir = os.path.join(config.get('_MIGRATIONS_DIR'), config.get('DATABASE_ENGINE'))
|
||||
migrations_dir = os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE'))
|
||||
if not os.path.isdir(migrations_dir):
|
||||
logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
|
||||
migrations_dir = os.path.join(args.migrations_dir, 'default')
|
||||
|
||||
@@ -1,5 +1,5 @@
celery==4.4.7
erc20-demurrage-token~=0.0.5a3
cic-eth-registry~=0.6.1a6
chainlib~=0.0.9rc1
cic_eth~=0.12.4a11
cic-eth-registry~=0.6.1a5
chainlib~=0.0.9rc3
cic_eth~=0.12.4a9
@@ -1,6 +1,6 @@
[metadata]
name = cic-eth-aux-erc20-demurrage-token
version = 0.0.2a7
version = 0.0.2a6
description = cic-eth tasks supporting erc20 demurrage token
author = Louis Holbrook
author_email = dev@holbrook.no
@@ -1,5 +1,6 @@
|
||||
SQLAlchemy==1.3.20
|
||||
cic-eth-registry>=0.6.1a6,<0.7.0
|
||||
cic-eth-registry>=0.6.1a5,<0.7.0
|
||||
hexathon~=0.0.1a8
|
||||
chainqueue>=0.0.4a6,<0.1.0
|
||||
eth-erc20>=0.1.2a2,<0.2.0
|
||||
chainlib-eth>=0.0.10a2,<0.1.0
|
||||
|
||||
@@ -683,4 +683,3 @@ class Api(ApiBase):
|
||||
|
||||
t = self.callback_success.apply_async([r])
|
||||
return t
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@ import datetime
|
||||
|
||||
# external imports
|
||||
import celery
|
||||
from cic_eth_registry import CICRegistry
|
||||
from chainlib.chain import ChainSpec
|
||||
from chainlib.eth.tx import unpack
|
||||
from chainlib.connection import RPCConnection
|
||||
|
||||
@@ -76,7 +76,7 @@ arg_flags = cic_eth.cli.argflag_std_read
|
||||
local_arg_flags = cic_eth.cli.argflag_local_task
|
||||
argparser = cic_eth.cli.ArgumentParser(arg_flags)
|
||||
argparser.process_local_flags(local_arg_flags)
|
||||
#argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use')
|
||||
argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use')
|
||||
argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to perist all queue entry status changes to storage')
|
||||
argparser.add_argument('--aux-all', action='store_true', help='include tasks from all submodules from the aux module path')
|
||||
argparser.add_argument('--aux', action='append', type=str, default=[], help='add single submodule from the aux module path')
|
||||
@@ -84,7 +84,7 @@ args = argparser.parse_args()
|
||||
|
||||
# process config
|
||||
extra_args = {
|
||||
# 'default_token_symbol': 'CIC_DEFAULT_TOKEN_SYMBOL',
|
||||
'default_token_symbol': 'CIC_DEFAULT_TOKEN_SYMBOL',
|
||||
'aux_all': None,
|
||||
'aux': None,
|
||||
'trace_queue_status': 'TASKS_TRACE_QUEUE_STATUS',
|
||||
@@ -187,17 +187,6 @@ elif len(args.aux) > 0:
|
||||
logg.info('aux module {} found in path {}'.format(v, aux_dir))
|
||||
aux.append(v)
|
||||
|
||||
default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
|
||||
defaullt_token_address = None
|
||||
if default_token_symbol:
|
||||
default_token_address = registry.by_name(default_token_symbol)
|
||||
else:
|
||||
default_token_address = registry.by_name('DefaultToken')
|
||||
c = ERC20Token(chain_spec, conn, default_token_address)
|
||||
default_token_symbol = c.symbol
|
||||
logg.info('found default token {} address {}'.format(default_token_symbol, default_token_address))
|
||||
config.add(default_token_symbol, 'CIC_DEFAULT_TOKEN_SYMBOL', exists_ok=True)
|
||||
|
||||
for v in aux:
|
||||
mname = 'cic_eth_aux.' + v
|
||||
mod = importlib.import_module(mname)
|
||||
@@ -215,8 +204,8 @@ def main():
|
||||
argv.append('-n')
|
||||
argv.append(config.get('CELERY_QUEUE'))
|
||||
|
||||
BaseTask.default_token_symbol = default_token_symbol
|
||||
BaseTask.default_token_address = default_token_address
|
||||
BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
|
||||
BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol)
|
||||
default_token = ERC20Token(chain_spec, conn, add_0x(BaseTask.default_token_address))
|
||||
default_token.load(conn)
|
||||
BaseTask.default_token_decimals = default_token.decimals
|
||||
|
||||
@@ -9,8 +9,8 @@ import semver
version = (
    0,
    12,
    4,
    'alpha.14',
    5,
    'alpha.1',
)

version_object = semver.VersionInfo(
@@ -1,3 +1,4 @@
|
||||
@node cic-eth-accounts
|
||||
@section Accounts
|
||||
|
||||
Accounts are private keys in the signer component keyed by "addresses," a one-way transformation of a public key. Data can be signed by using the account as identifier for corresponding RPC requests.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
@anchor{cic-eth-appendix-system-maintenance}
|
||||
@node cic-eth system maintenance
|
||||
@appendix Admin API
|
||||
|
||||
The admin API is still in an early stage of refinement. User friendliness can be considerably improved.
|
||||
@@ -33,7 +33,7 @@ Get the current state of a lock
|
||||
|
||||
@appendixsection tag_account
|
||||
|
||||
Associate an identifier with an account address (@xref{cic-eth-system-accounts})
|
||||
Associate an identifier with an account address (@xref{cic-eth system accounts})
|
||||
|
||||
@appendixsection have_account
|
||||
|
||||
|
||||
@@ -14,6 +14,5 @@ Released 2021 under GPL3
|
||||
@c
|
||||
@contents
|
||||
|
||||
@include content.texi
|
||||
@include appendix.texi
|
||||
@include index.texi
|
||||
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
@include admin.texi
|
||||
@include chains.texi
|
||||
@include transfertypes.texi
|
||||
@@ -1,4 +1,4 @@
|
||||
@anchor{cic-eth-appendix-task-chains}
|
||||
@node cic-eth Appendix Task chains
|
||||
@appendix Task chains
|
||||
|
||||
TBC - explain here how to generate these chain diagrams
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
@node cic-eth configuration
|
||||
@section Configuration
|
||||
|
||||
Configuration parameters are grouped by configuration filename.
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
@node cic-eth-dependencies
|
||||
@section Dependencies
|
||||
|
||||
This application is written in Python 3.8. It is tightly coupled with @code{python-celery}, which provides the task worker ecosystem. It also uses @code{SQLAlchemy} which provides useful abstractions for persistent storage though SQL, and @code{alembic} for database schema migrations.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
@anchor{cic-eth-incoming}
|
||||
@node cic-eth-incoming
|
||||
@section Incoming transactions
|
||||
|
||||
All transactions in mined blocks will be passed to a selection of plugin filters to the @code{chainsyncer} component. Each of these filters are individual python module files in @code{cic_eth.runnable.daemons.filters}. This section describes their function.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
@node cic-eth
|
||||
@top cic-eth
|
||||
|
||||
@include intro.texi
|
||||
@include dependencies.texi
|
||||
@include configuration.texi
|
||||
@include system.texi
|
||||
@@ -9,3 +9,6 @@
|
||||
@include incoming.texi
|
||||
@include services.texi
|
||||
@include tools.texi
|
||||
@include admin.texi
|
||||
@include chains.texi
|
||||
@include transfertypes.texi
|
||||
@@ -1,8 +1,9 @@
|
||||
@node cic-eth-interacting
|
||||
@section Interacting with the system
|
||||
|
||||
The API to the @var{cic-eth} component is a proxy for executing @emph{chains of Celery tasks}. The tasks that compose individual chains are documented in @ref{cic-eth-appendix-task-chains,the Task Chain appendix}, which also describes a CLI tool that can generate graph representationso of them.
|
||||
The API to the @var{cic-eth} component is a proxy for executing @emph{chains of Celery tasks}. The tasks that compose individual chains are documented in @ref{cic-eth Appendix Task chains,the Task Chain appendix}, which also describes a CLI tool that can generate graph representationso of them.
|
||||
|
||||
There are two API classes, @var{Api} and @var{AdminApi}. The former is described later in this section, the latter described in @ref{cic-eth-appendix-system-maintenance,the Admin API appendix}.
|
||||
There are two API classes, @var{Api} and @var{AdminApi}. The former is described later in this section, the latter described in @ref{cic-eth system maintenance,the Admin API appendix}.
|
||||
|
||||
|
||||
@subsection Interface
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
@node cic-eth-outgoing
|
||||
@section Outgoing transactions
|
||||
|
||||
@strong{Important! A pre-requisite for proper functioning of the component is that no other agent is sending transactions to the network for any of the keys in the keystore.}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
@node cic-eth-services
|
||||
@section Services
|
||||
|
||||
There are four daemons that together orchestrate all of the aforementioned recipes. This section will provide a high level description of them.
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
@node cic-eth system accounts
|
||||
@section System initialization
|
||||
|
||||
When the system starts for the first time, it is locked for any state change request other than account creation@footnote{Specifically, the @code{INIT}, @code{SEND} and @code{QUEUE} lock bits are set.}. These locks should be @emph{reset} once system initialization has been completed. Currently, system initialization only involves creating and tagging required system accounts, as specified below.
|
||||
|
||||
See @ref{cic-eth-locking,Locking} and @ref{cic-eth-tools-ctrl,ctrl in Tools} for details on locking.
|
||||
|
||||
@anchor{cic-eth-system-accounts}
|
||||
@subsection System accounts
|
||||
|
||||
Certain accounts in the system have special roles. These are defined by @emph{tagging} certain accounts addresses with well-known identifiers.
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
@node cic-eth-tools
|
||||
@section Tools
|
||||
|
||||
A collection of CLI tools have been provided to help with diagnostics and other administrative tasks. These use the same configuration infrastructure as the daemons.
|
||||
@@ -36,7 +37,7 @@ Execute a token transfer on behalf of a custodial account.
|
||||
|
||||
@subsection tag (cic-eth-tag)
|
||||
|
||||
Associate an account address with a string identifier. @xref{cic-eth-system-accounts}
|
||||
Associate an account address with a string identifier. @xref{cic-eth system accounts}
|
||||
|
||||
|
||||
@anchor{cic-eth-tools-ctrl}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
@node cic-eth Appendix Transaction types
|
||||
@appendix Transfer types
|
||||
|
||||
@table @var
|
||||
|
||||
@@ -1,32 +1,46 @@
|
||||
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
|
||||
|
||||
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
|
||||
# syntax = docker/dockerfile:1.2
|
||||
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||
|
||||
# Copy just the requirements and install....this _might_ give docker a hint on caching but we
|
||||
# do load these all into setup.py later
|
||||
# TODO can we take all the requirements out of setup.py and just do a pip install -r requirements.txt && python setup.py
|
||||
#COPY cic-eth/requirements.txt .
|
||||
|
||||
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
|
||||
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||
ARG EXTRA_PIP_ARGS=""
|
||||
ARG PIP_INDEX_URL=https://pypi.org/simple
|
||||
#RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
# pip install --index-url https://pypi.org/simple \
|
||||
# --force-reinstall \
|
||||
# --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
||||
# -r requirements.txt
|
||||
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
pip install --index-url $PIP_INDEX_URL \
|
||||
--pre \
|
||||
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||
cic-eth-aux-erc20-demurrage-token~=0.0.2a7
|
||||
pip install --index-url https://pypi.org/simple \
|
||||
--extra-index-url $GITLAB_PYTHON_REGISTRY \
|
||||
--extra-index-url $EXTRA_INDEX_URL \
|
||||
$EXTRA_PIP_ARGS \
|
||||
cic-eth-aux-erc20-demurrage-token~=0.0.2a6
|
||||
|
||||
|
||||
COPY *requirements.txt ./
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
pip install --index-url $PIP_INDEX_URL \
|
||||
--pre \
|
||||
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||
pip install --index-url https://pypi.org/simple \
|
||||
--extra-index-url $GITLAB_PYTHON_REGISTRY \
|
||||
--extra-index-url $EXTRA_INDEX_URL \
|
||||
$EXTRA_PIP_ARGS \
|
||||
-r requirements.txt \
|
||||
-r services_requirements.txt \
|
||||
-r admin_requirements.txt
|
||||
|
||||
|
||||
# always install the latest signer
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
pip install --index-url https://pypi.org/simple \
|
||||
--extra-index-url $GITLAB_PYTHON_REGISTRY \
|
||||
--extra-index-url $EXTRA_INDEX_URL \
|
||||
$EXTRA_PIP_ARGS \
|
||||
crypto-dev-signer
|
||||
|
||||
COPY . .
|
||||
RUN python setup.py install
|
||||
|
||||
@@ -39,7 +53,7 @@ RUN chmod 755 *.sh
|
||||
# # ini files in config directory defines the configurable parameters for the application
|
||||
# # they can all be overridden by environment variables
|
||||
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||
#COPY config/ /usr/local/etc/cic-eth/
|
||||
COPY config/ /usr/local/etc/cic-eth/
|
||||
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
|
||||
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
|
||||
|
||||
|
||||
@@ -2,6 +2,5 @@
|
||||
|
||||
set -e
|
||||
>&2 echo executing database migration
|
||||
#python scripts/migrate.py -c /usr/local/etc/cic-eth --migrations-dir /usr/local/share/cic-eth/alembic -vv
|
||||
python scripts/migrate.py --migrations-dir /usr/local/share/cic-eth/alembic -vv
|
||||
python scripts/migrate.py -c /usr/local/etc/cic-eth --migrations-dir /usr/local/share/cic-eth/alembic -vv
|
||||
set +e
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
celery==4.4.7
|
||||
chainlib-eth>=0.0.10a16,<0.1.0
|
||||
chainlib>=0.0.10a3,<0.1.0
|
||||
semver==2.13.0
|
||||
crypto-dev-signer>=0.4.15rc2,<0.5.0
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
chainqueue>=0.0.6a1,<0.1.0
|
||||
chainsyncer[sql]>=0.0.7a3,<0.1.0
|
||||
chainqueue>=0.0.5a1,<0.1.0
|
||||
chainsyncer[sql]>=0.0.6a3,<0.1.0
|
||||
alembic==1.4.2
|
||||
confini>=0.3.6rc4,<0.5.0
|
||||
redis==3.5.3
|
||||
@@ -8,9 +8,10 @@ pycryptodome==3.10.1
|
||||
liveness~=0.0.1a7
|
||||
eth-address-index>=0.2.4a1,<0.3.0
|
||||
eth-accounts-index>=0.1.2a3,<0.2.0
|
||||
cic-eth-registry>=0.6.1a6,<0.7.0
|
||||
cic-eth-registry>=0.6.1a5,<0.7.0
|
||||
erc20-faucet>=0.3.2a2,<0.4.0
|
||||
erc20-transfer-authorization>=0.3.5a2,<0.4.0
|
||||
sarafu-faucet>=0.0.7a2,<0.1.0
|
||||
moolb~=0.1.1b2
|
||||
chainlib-eth>=0.0.10a2,<0.1.0
|
||||
okota>=0.2.4a6,<0.3.0
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
[metadata]
|
||||
name = cic-eth
|
||||
#version = attr: cic_eth.version.__version_string__
|
||||
version = 0.12.4a13
|
||||
version = attr: cic_eth.version.__version_string__
|
||||
description = CIC Network Ethereum interaction
|
||||
author = Louis Holbrook
|
||||
author_email = dev@holbrook.no
|
||||
|
||||
@@ -110,7 +110,7 @@ def test_tokens_noproof(
|
||||
custodial_roles,
|
||||
foo_token_declaration,
|
||||
bar_token_declaration,
|
||||
celery_session_worker,
|
||||
celery_worker,
|
||||
):
|
||||
|
||||
api = Api(str(default_chain_spec), queue=None, callback_param='foo')
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
crypto-dev-signer>=0.4.15rc2,<=0.4.15
|
||||
chainqueue>=0.0.5a3,<0.1.0
|
||||
cic-eth-registry>=0.6.1a6,<0.7.0
|
||||
chainqueue>=0.0.6a1,<0.1.0
|
||||
cic-eth-registry>=0.6.1a5,<0.7.0
|
||||
redis==3.5.3
|
||||
hexathon~=0.0.1a8
|
||||
pycryptodome==3.10.1
|
||||
pyxdg==0.27
|
||||
chainlib-eth>=0.0.10a2,<0.1.0
|
||||
|
||||
@@ -1,23 +1,19 @@
|
||||
FROM node:15.3.0-alpine3.10
|
||||
# syntax = docker/dockerfile:1.2
|
||||
#FROM node:15.3.0-alpine3.10
|
||||
FROM node:lts-alpine3.14
|
||||
|
||||
WORKDIR /root
|
||||
|
||||
RUN apk add --no-cache postgresql bash
|
||||
|
||||
ARG NPM_REPOSITORY=${NPM_REPOSITORY:-https://registry.npmjs.org}
|
||||
RUN npm config set snyk=false
|
||||
#RUN npm config set registry={NPM_REPOSITORY}
|
||||
RUN npm config set registry=${NPM_REPOSITORY}
|
||||
|
||||
# copy the dependencies
|
||||
COPY package.json package-lock.json ./
|
||||
COPY package.json package-lock.json .
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.npm \
|
||||
npm set cache /root/.npm && \
|
||||
npm cache verify && \
|
||||
npm ci --verbose
|
||||
npm ci
|
||||
|
||||
COPY webpack.config.js ./
|
||||
COPY tsconfig.json ./
|
||||
COPY webpack.config.js .
|
||||
COPY tsconfig.json .
|
||||
## required to build the cic-client-meta module
|
||||
COPY . .
|
||||
COPY tests/*.asc /root/pgp/
|
||||
|
||||
apps/cic-meta/package-lock.json (generated, 5650 lines): file diff suppressed because it is too large.
@@ -1,9 +1,8 @@
|
||||
create table if not exists store (
|
||||
id serial primary key not null,
|
||||
owner_fingerprint text default null,
|
||||
owner_fingerprint text not null,
|
||||
hash char(64) not null unique,
|
||||
content text not null,
|
||||
mime_type text
|
||||
content text not null
|
||||
);
|
||||
|
||||
create index if not exists idx_fp on store ((lower(owner_fingerprint)));
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
create table if not exists store (
|
||||
/*id serial primary key not null,*/
|
||||
id integer primary key autoincrement,
|
||||
owner_fingerprint text default null,
|
||||
owner_fingerprint text not null,
|
||||
hash char(64) not null unique,
|
||||
content text not null,
|
||||
mime_type text
|
||||
content text not null
|
||||
);
|
||||
|
||||
create index if not exists idx_fp on store ((lower(owner_fingerprint)));
|
||||
|
||||
@@ -1,13 +1,12 @@
import * as Automerge from 'automerge';
import * as pgp from 'openpgp';
import * as crypto from 'crypto';

import { Envelope, Syncable, bytesToHex } from '@cicnet/crdt-meta';
import { Envelope, Syncable } from '@cicnet/crdt-meta';


function handleNoMergeGet(db, digest, keystore) {
const sql = "SELECT owner_fingerprint, content, mime_type FROM store WHERE hash = '" + digest + "'";
return new Promise<any>((whohoo, doh) => {
const sql = "SELECT content FROM store WHERE hash = '" + digest + "'";
return new Promise<string|boolean>((whohoo, doh) => {
db.query(sql, (e, rs) => {
if (e !== null && e !== undefined) {
doh(e);
@@ -17,36 +16,16 @@ function handleNoMergeGet(db, digest, keystore) {
return;
}

const immutable = rs.rows[0]['owner_fingerprint'] == undefined;
let mimeType;
if (immutable) {
if (rs.rows[0]['mime_type'] === undefined) {
mimeType = 'application/octet-stream';
} else {
mimeType = rs.rows[0]['mime_type'];
}
} else {
mimeType = 'application/json';
}

const cipherText = rs.rows[0]['content'];
pgp.message.readArmored(cipherText).then((m) => {
const opts = {
message: m,
privateKeys: [keystore.getPrivateKey()],
format: 'binary',
};
pgp.decrypt(opts).then((plainText) => {
let r;
if (immutable) {
r = plainText.data;
} else {
mimeType = 'application/json';
const d = new TextDecoder().decode(plainText.data);
const o = Syncable.fromJSON(d);
r = JSON.stringify(o.m['data']);
}
whohoo([r, mimeType]);
const o = Syncable.fromJSON(plainText.data);
const r = JSON.stringify(o.m['data']);
whohoo(r);
}).catch((e) => {
console.error('decrypt', e);
doh(e);
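In the variant of handleNoMergeGet that returns `[content, mimeType]`, the mime type is decided purely from the stored row: a row without an owner fingerprint is treated as immutable and keeps its stored mime type (falling back to application/octet-stream), while an owner-bound row is always a merged automerge document served as application/json. The following is an illustrative restatement of that decision in Python; the row dict shape is an assumption for the sketch.

```python
# Illustrative-only restatement of the mime-type decision above.
def resolve_mime_type(row: dict) -> str:
    immutable = row.get('owner_fingerprint') is None
    if immutable:
        # Immutable, content-addressed entries keep the type they were stored with.
        if row.get('mime_type') is None:
            return 'application/octet-stream'
        return row['mime_type']
    # Owner-bound CRDT records are rendered as merged JSON.
    return 'application/json'

assert resolve_mime_type({'owner_fingerprint': None, 'mime_type': 'text/plain'}) == 'text/plain'
assert resolve_mime_type({'owner_fingerprint': None, 'mime_type': None}) == 'application/octet-stream'
assert resolve_mime_type({'owner_fingerprint': 'DEADBEEF'}) == 'application/json'
```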
@@ -78,7 +57,6 @@ function handleServerMergePost(data, db, digest, keystore, signer) {
} else {
e = Envelope.fromJSON(v);
s = e.unwrap();
console.debug('s', s, o)
s.replace(o, 'server merge');
e.set(s);
s.onwrap = (e) => {
@@ -161,13 +139,7 @@ function handleClientMergeGet(db, digest, keystore) {
privateKeys: [keystore.getPrivateKey()],
};
pgp.decrypt(opts).then((plainText) => {
let d;
if (typeof(plainText.data) == 'string') {
d = plainText.data;
} else {
d = new TextDecoder().decode(plainText.data);
}
const o = Syncable.fromJSON(d);
const o = Syncable.fromJSON(plainText.data);
const e = new Envelope(o);
whohoo(e.toJSON());
}).catch((e) => {
@@ -229,65 +201,10 @@ function handleClientMergePut(data, db, digest, keystore, signer) {
});
}


function handleImmutablePost(data, db, digest, keystore, contentType) {
return new Promise<Array<string|boolean>>((whohoo, doh) => {
let data_binary = data;
const h = crypto.createHash('sha256');
h.update(data_binary);
const z = h.digest();
const r = bytesToHex(z);

if (digest) {
if (r != digest) {
doh('hash mismatch: ' + r + ' != ' + digest);
return;
}
} else {
digest = r;
console.debug('calculated digest ' + digest);
}

handleNoMergeGet(db, digest, keystore).then((haveDigest) => {
if (haveDigest !== false) {
whohoo([false, digest]);
return;
}
let message;
if (typeof(data) == 'string') {
data_binary = new TextEncoder().encode(data);
message = pgp.message.fromText(data);
} else {
message = pgp.message.fromBinary(data);
}

const opts = {
message: message,
publicKeys: keystore.getEncryptKeys(),
};
pgp.encrypt(opts).then((cipherText) => {
const sql = "INSERT INTO store (hash, content, mime_type) VALUES ('" + digest + "', '" + cipherText.data + "', '" + contentType + "') ON CONFLICT (hash) DO UPDATE SET content = EXCLUDED.content;";
db.query(sql, (e, rs) => {
if (e !== null && e !== undefined) {
doh(e);
return;
}
whohoo([true, digest]);
});
}).catch((e) => {
doh(e);
});
}).catch((e) => {
doh(e);
});
});
}

export {
handleClientMergePut,
handleClientMergeGet,
handleServerMergePost,
handleServerMergePut,
handleNoMergeGet,
handleImmutablePost,
};
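handleImmutablePost, present on one side of this hunk, derives the content address as the hex-encoded sha256 of the raw request body, rejects a supplied digest that does not match, and resolves to `[created, digest]`. A client that wants to pre-compute the address only needs the same hash; the sketch below shows that, and the expected value matches the `hashOfFoo` constant used in the server tests.

```python
# Client-side sketch of the content address handleImmutablePost expects:
# the digest is the hex-encoded sha256 of the raw body.
import hashlib

def content_address(body: bytes) -> str:
    return hashlib.sha256(body).hexdigest()

# Matches the hashOfFoo constant used in the server tests.
assert content_address(b'foo') == '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'
```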
@@ -118,71 +118,37 @@ async function processRequest(req, res) {
return;
}

let mod = req.method.toLowerCase() + ":automerge:";
let modDetail = undefined;
let immutablePost = false;
try {
digest = parseDigest(req.url);
} catch(e) {
if (req.url == '/') {
immutablePost = true;
modDetail = 'immutable';
} else {
console.error('url is not empty (' + req.url + ') and not valid digest error: ' + e)
res.writeHead(400, {"Content-Type": "text/plain"});
res.end();
return;
}
console.error('digest error: ' + e)
res.writeHead(400, {"Content-Type": "text/plain"});
res.end();
return;
}

if (modDetail === undefined) {
const mergeHeader = req.headers['x-cic-automerge'];
switch (mergeHeader) {
case "client":
if (immutablePost) {
res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
res.end();
return;
}
modDetail = "client"; // client handles merges
break;
case "server":
if (immutablePost) {
res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
res.end();
return;
}
modDetail = "server"; // server handles merges
break;
case "immutable":
modDetail = "immutable"; // no merging, literal immutable content with content-addressing
break;
default:
modDetail = "none"; // merged object only (get only)
}
const mergeHeader = req.headers['x-cic-automerge'];
let mod = req.method.toLowerCase() + ":automerge:";
switch (mergeHeader) {
case "client":
mod += "client"; // client handles merges
break;
case "server":
mod += "server"; // server handles merges
break;
default:
mod += "none"; // merged object only (get only)
}
mod += modDetail;


// handle bigger chunks of data
let data;
let data = '';
req.on('data', (d) => {
if (data === undefined) {
data = d;
} else {
data += d;
}
data += d;
});
req.on('end', async (d) => {
let inputContentType = req.headers['content-type'];
let debugString = 'executing mode ' + mod ;
if (data !== undefined) {
debugString += ' for content type ' + inputContentType + ' length ' + data.length;
}
console.debug(debugString);
let content;
req.on('end', async () => {
console.debug('mode', mod);
let content = '';
let contentType = 'application/json';
let statusCode = 200;
console.debug('handling data', data);
let r:any = undefined;
try {
switch (mod) {
@@ -193,7 +159,6 @@ async function processRequest(req, res) {
res.end();
return;
}
content = '';
break;

case 'get:automerge:client':
@@ -211,7 +176,6 @@ async function processRequest(req, res) {
res.end();
return;
}
content = '';
break;
//case 'get:automerge:server':
// content = await handlers.handleServerMergeGet(db, digest, keystore);
@@ -219,24 +183,12 @@ async function processRequest(req, res) {

case 'get:automerge:none':
r = await handlers.handleNoMergeGet(db, digest, keystore);
if (r === false) {
if (r == false) {
res.writeHead(404, {"Content-Type": "text/plain"});
res.end();
return;
}
content = r[0];
contentType = r[1];
break;

case 'post:automerge:immutable':
if (inputContentType === undefined) {
inputContentType = 'application/octet-stream';
}
r = await handlers.handleImmutablePost(data, db, digest, keystore, inputContentType);
if (r[0]) {
statusCode = 201;
}
content = r[1];
content = r;
break;

default:
@@ -252,21 +204,14 @@ async function processRequest(req, res) {
}

if (content === undefined) {
console.error('empty content', mod, digest, data);
console.error('empty content', data);
res.writeHead(404, {"Content-Type": "text/plain"});
res.end();
return;
}

//let responseContentLength;
//if (typeof(content) == 'string') {
// (new TextEncoder().encode(content)).length;
//}
const responseContentLength = content.length;
//if (responseContentLength === undefined) {
// responseContentLength = 0;
//}
res.writeHead(statusCode, {
const responseContentLength = (new TextEncoder().encode(content)).length;
res.writeHead(200, {
"Access-Control-Allow-Origin": "*",
"Content-Type": contentType,
"Content-Length": responseContentLength,
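In the variant of processRequest that keeps the immutable mode, the `x-cic-automerge` request header selects the mode: `client`, `server`, `immutable`, or none, and a POST to the bare root path `/` with the `immutable` header stores content-addressed data, answering 201 with the digest when a new row is created. The sketch below shows how a client might exercise that API; the base URL, port, and the exact response bodies are assumptions, only the header name, routes, and status codes come from the code above.

```python
# Sketch of driving the modes dispatched above, assuming a local cic-meta
# instance; BASE is an assumption, not a documented default.
import hashlib
import requests

BASE = 'http://localhost:8000'

body = b'hello world'
digest = hashlib.sha256(body).hexdigest()

# Immutable, content-addressed POST to the bare root path.
r = requests.post(BASE + '/', data=body,
                  headers={'x-cic-automerge': 'immutable',
                           'content-type': 'text/plain'})
print(r.status_code, r.text)  # expected: 201 and the digest when newly created

# Plain GET of the stored content (no merge handling requested).
r = requests.get(BASE + '/' + digest)
print(r.headers.get('content-type'), r.content)
```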
@@ -7,8 +7,6 @@ import * as handlers from '../scripts/server/handlers';
|
||||
import { Envelope, Syncable, ArgPair, PGPKeyStore, PGPSigner, KeyStore, Signer } from '@cicnet/crdt-meta';
|
||||
import { SqliteAdapter } from '../src/db';
|
||||
|
||||
const hashOfFoo = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';
|
||||
|
||||
function createKeystore() {
|
||||
const pksa = fs.readFileSync(__dirname + '/privatekeys.asc', 'utf-8');
|
||||
const pubksa = fs.readFileSync(__dirname + '/publickeys.asc', 'utf-8');
|
||||
@@ -46,13 +44,11 @@ function createDatabase(sqlite_file:string):Promise<any> {
|
||||
// doh(e);
|
||||
// return;
|
||||
// }
|
||||
// get this from real sql files sources
|
||||
const sql = `CREATE TABLE store (
|
||||
id integer primary key autoincrement,
|
||||
owner_fingerprint text default null,
|
||||
owner_fingerprint text not null,
|
||||
hash char(64) not null unique,
|
||||
content text not null,
|
||||
mime_type text default null
|
||||
content text not null
|
||||
);
|
||||
`
|
||||
|
||||
@@ -115,18 +111,15 @@ describe('server', async () => {
|
||||
let j = env.toJSON();
|
||||
const content = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
|
||||
assert(content); // true-ish
|
||||
console.debug('content', content);
|
||||
|
||||
let v = await handlers.handleNoMergeGet(db, digest, keystore);
|
||||
if (v === false) {
|
||||
if (v === undefined) {
|
||||
db.close();
|
||||
assert.fail('');
|
||||
}
|
||||
db.close();
|
||||
return;
|
||||
|
||||
v = await handlers.handleClientMergeGet(db, digest, keystore);
|
||||
if (v === false) {
|
||||
if (v === undefined) {
|
||||
db.close();
|
||||
assert.fail('');
|
||||
}
|
||||
@@ -194,7 +187,7 @@ describe('server', async () => {
|
||||
j = await handlers.handleNoMergeGet(db, digest, keystore);
|
||||
assert(v); // true-ish
|
||||
|
||||
let o = JSON.parse(j[0]);
|
||||
let o = JSON.parse(j);
|
||||
o.bar = 'xyzzy';
|
||||
j = JSON.stringify(o);
|
||||
|
||||
@@ -219,82 +212,39 @@ describe('server', async () => {
|
||||
|
||||
j = await handlers.handleNoMergeGet(db, digest, keystore);
|
||||
assert(j); // true-ish
|
||||
o = JSON.parse(j[0]);
|
||||
o = JSON.parse(j);
|
||||
console.log(o);
|
||||
|
||||
db.close();
|
||||
});
|
||||
|
||||
// await it('server_merge', async () => {
|
||||
// const keystore = await createKeystore();
|
||||
// const signer = new PGPSigner(keystore);
|
||||
//
|
||||
// const db = await createDatabase(__dirname + '/db.three.sqlite');
|
||||
//
|
||||
// const digest = 'deadbeef';
|
||||
// let s = new Syncable(digest, {
|
||||
// bar: 'baz',
|
||||
// });
|
||||
// let env = await wrap(s, signer)
|
||||
// let j:any = env.toJSON();
|
||||
//
|
||||
// let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
|
||||
// assert(v); // true-ish
|
||||
//
|
||||
// j = await handlers.handleNoMergeGet(db, digest, keystore);
|
||||
// assert(v); // true-ish
|
||||
//
|
||||
// let o = JSON.parse(j);
|
||||
// o.bar = 'xyzzy';
|
||||
// j = JSON.stringify(o);
|
||||
//
|
||||
// let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
|
||||
// assert(signMaterial)
|
||||
//
|
||||
// env = Envelope.fromJSON(signMaterial);
|
||||
//
|
||||
// console.log('envvvv', env);
|
||||
//
|
||||
// const signedData = await signData(env.o['digest'], keystore);
|
||||
// console.log('signed', signedData);
|
||||
//
|
||||
// o = {
|
||||
// 'm': env,
|
||||
// 's': signedData,
|
||||
// }
|
||||
// j = JSON.stringify(o);
|
||||
// console.log(j);
|
||||
//
|
||||
// v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
|
||||
// assert(v);
|
||||
//
|
||||
// j = await handlers.handleNoMergeGet(db, digest, keystore);
|
||||
// assert(j); // true-ish
|
||||
// o = JSON.parse(j);
|
||||
// console.log(o);
|
||||
//
|
||||
// db.close();
|
||||
// });
|
||||
//
|
||||
|
||||
|
||||
await it('server_merge_empty', async () => {
|
||||
await it('server_merge', async () => {
|
||||
const keystore = await createKeystore();
|
||||
const signer = new PGPSigner(keystore);
|
||||
|
||||
const db = await createDatabase(__dirname + '/db.three.sqlite');
|
||||
|
||||
const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
|
||||
let o:any = {
|
||||
foo: 'bar',
|
||||
xyzzy: 42,
|
||||
}
|
||||
let j:any = JSON.stringify(o);
|
||||
const digest = 'deadbeef';
|
||||
let s = new Syncable(digest, {
|
||||
bar: 'baz',
|
||||
});
|
||||
let env = await wrap(s, signer)
|
||||
let j:any = env.toJSON();
|
||||
|
||||
let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
|
||||
assert(v); // true-ish
|
||||
|
||||
j = await handlers.handleNoMergeGet(db, digest, keystore);
|
||||
assert(v); // true-ish
|
||||
|
||||
let o = JSON.parse(j);
|
||||
o.bar = 'xyzzy';
|
||||
j = JSON.stringify(o);
|
||||
|
||||
let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
|
||||
assert(signMaterial)
|
||||
|
||||
const env = Envelope.fromJSON(signMaterial);
|
||||
env = Envelope.fromJSON(signMaterial);
|
||||
|
||||
console.log('envvvv', env);
|
||||
|
||||
@@ -308,55 +258,58 @@ describe('server', async () => {
|
||||
j = JSON.stringify(o);
|
||||
console.log(j);
|
||||
|
||||
let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
|
||||
v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
|
||||
assert(v);
|
||||
|
||||
j = await handlers.handleNoMergeGet(db, digest, keystore);
|
||||
assert(j); // true-ish
|
||||
o = JSON.parse(j[0]);
|
||||
o = JSON.parse(j);
|
||||
console.log(o);
|
||||
|
||||
db.close();
|
||||
});
|
||||
|
||||
await it('immutable_nodigest', async() => {
|
||||
const keystore = await createKeystore();
|
||||
const db = await createDatabase(__dirname + '/db.three.sqlite');
|
||||
|
||||
const s:string = 'foo';
|
||||
let r;
|
||||
r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
|
||||
assert(r[0]);
|
||||
assert(hashOfFoo == r[1]);
|
||||
|
||||
r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
|
||||
assert(!r[0]);
|
||||
assert(hashOfFoo == r[1]);
|
||||
|
||||
const b:Uint8Array = new TextEncoder().encode(s);
|
||||
r = await handlers.handleImmutablePost(b, db, undefined, keystore, 'text/plain');
|
||||
assert(!r[0]);
|
||||
assert(hashOfFoo == r[1]);
|
||||
});
|
||||
|
||||
await it('immutable_digest', async() => {
|
||||
const keystore = await createKeystore();
|
||||
const db = await createDatabase(__dirname + '/db.three.sqlite');
|
||||
|
||||
const s:string = 'foo';
|
||||
const b:Uint8Array = new TextEncoder().encode(s);
|
||||
let r;
|
||||
r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
|
||||
assert(r[0]);
|
||||
assert(hashOfFoo == r[1]);
|
||||
|
||||
r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
|
||||
assert(!r[0]);
|
||||
assert(hashOfFoo == r[1]);
|
||||
|
||||
r = await handlers.handleImmutablePost(s, db, hashOfFoo, keystore, 'text/plain');
|
||||
assert(!r[0]);
|
||||
assert(hashOfFoo == r[1]);
|
||||
});
|
||||
// await it('server_merge_empty', async () => {
|
||||
// const keystore = await createKeystore();
|
||||
// const signer = new PGPSigner(keystore);
|
||||
//
|
||||
// const db = await createDatabase(__dirname + '/db.three.sqlite');
|
||||
//
|
||||
// const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
|
||||
// let o:any = {
|
||||
// foo: 'bar',
|
||||
// xyzzy: 42,
|
||||
// }
|
||||
// let j:any = JSON.stringify(o);
|
||||
//
|
||||
// let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
|
||||
// assert(signMaterial)
|
||||
//
|
||||
// const env = Envelope.fromJSON(signMaterial);
|
||||
//
|
||||
// console.log('envvvv', env);
|
||||
//
|
||||
// const signedData = await signData(env.o['digest'], keystore);
|
||||
// console.log('signed', signedData);
|
||||
//
|
||||
// o = {
|
||||
// 'm': env,
|
||||
// 's': signedData,
|
||||
// }
|
||||
// j = JSON.stringify(o);
|
||||
// console.log(j);
|
||||
//
|
||||
// let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
|
||||
// assert(v);
|
||||
//
|
||||
// j = await handlers.handleNoMergeGet(db, digest, keystore);
|
||||
// assert(j); // true-ish
|
||||
// o = JSON.parse(j);
|
||||
// console.log(o);
|
||||
//
|
||||
// db.close();
|
||||
// });
|
||||
});
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ import semver

logg = logging.getLogger()

version = (0, 4, 0, 'alpha.11')
version = (0, 4, 0, 'alpha.10')

version_object = semver.VersionInfo(
major=version[0],
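The version module keeps the release as a tuple and wraps it in `semver.VersionInfo`, which is also what the `version = attr: cic_eth.version.__version_string__` line in setup.cfg relies on. A minimal sketch of that pattern, assuming the module exposes the string via `__version_string__` as the setup.cfg hunk suggests (the exact module layout may differ):

```python
# Minimal sketch of deriving __version_string__ from the version tuple
# with the semver package, as the hunk above suggests.
import semver

version = (0, 4, 0, 'alpha.11')

version_object = semver.VersionInfo(
    major=version[0],
    minor=version[1],
    patch=version[2],
    prerelease=version[3],
)

__version_string__ = str(version_object)
print(__version_string__)  # 0.4.0-alpha.11
```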
@@ -1,28 +1,22 @@
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"

FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev

#RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
RUN apt-get install libffi-dev -y

ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
COPY requirements.txt .

RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
-r requirements.txt

COPY . .
RUN python setup.py install

COPY docker/*.sh ./
RUN chmod +x /root/*.sh
COPY docker/*.sh .
RUN chmod +x *.sh

# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables

@@ -1,8 +1,7 @@
confini>=0.3.6rc4,<0.5.0
confini~=0.4.1a1
africastalking==1.2.3
SQLAlchemy==1.3.20
alembic==1.4.2
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
semver==2.13.0

@@ -1,22 +0,0 @@
ARG DOCKER_REGISTRY=registry.gitlab.com/grassrootseconomics

FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2 as dev

WORKDIR /root

RUN apt-get install libffi-dev -y

COPY requirements.txt .

ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL="https://pypi.org/simple"
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt

COPY . .

#RUN chmod +x *.sh

@@ -1 +0,0 @@
funga-eth[sql]>=0.5.1a1,<0.6.0
@@ -7,7 +7,6 @@ from typing import Optional
# third-party imports
from cic_eth.api import Api
from cic_eth_aux.erc20_demurrage_token.api import Api as DemurrageApi
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.transaction import from_wei
@@ -103,7 +102,7 @@ def get_cached_available_balance(blockchain_address: str) -> float:
:rtype: float
"""
identifier = bytes.fromhex(blockchain_address)
key = cache_data_key(identifier, salt=MetadataPointer.BALANCES)
key = cache_data_key(identifier, salt=':cic.balances')
cached_balances = get_cached_data(key=key)
if cached_balances:
return calculate_available_balance(json.loads(cached_balances))
@@ -118,5 +117,5 @@ def get_cached_adjusted_balance(identifier: bytes):
:return:
:rtype:
"""
key = cache_data_key(identifier, MetadataPointer.BALANCES_ADJUSTED)
key = cache_data_key(identifier, ':cic.adjusted_balance')
return get_cached_data(key)

@@ -7,7 +7,6 @@ from typing import Optional
import celery
from chainlib.hash import strip_0x
from cic_eth.api import Api
from cic_types.condiments import MetadataPointer

# local import
from cic_ussd.account.chain import Chain
@@ -54,7 +53,7 @@ def get_cached_statement(blockchain_address: str) -> bytes:
:rtype: str
"""
identifier = bytes.fromhex(strip_0x(blockchain_address))
key = cache_data_key(identifier=identifier, salt=MetadataPointer.STATEMENT)
key = cache_data_key(identifier=identifier, salt=':cic.statement')
return get_cached_data(key=key)

@@ -5,7 +5,6 @@ from typing import Dict, Optional

# external imports
from cic_eth.api import Api
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.chain import Chain
@@ -24,7 +23,7 @@ def get_cached_default_token(chain_str: str) -> Optional[str]:
:rtype:
"""
logg.debug(f'Retrieving default token from cache for chain: {chain_str}')
key = cache_data_key(identifier=chain_str.encode('utf-8'), salt=MetadataPointer.TOKEN_DEFAULT)
key = cache_data_key(identifier=chain_str.encode('utf-8'), salt=':cic.default_token_data')
return get_cached_data(key=key)

@@ -2,8 +2,7 @@
import hashlib
import logging

# external imports
from cic_types.condiments import MetadataPointer
# third-party imports
from redis import Redis

logg = logging.getLogger()
@@ -39,7 +38,7 @@ def get_cached_data(key: str):
return cache.get(name=key)


def cache_data_key(identifier: bytes, salt: MetadataPointer):
def cache_data_key(identifier: bytes, salt: str):
"""
:param identifier:
:type identifier:
@@ -50,5 +49,5 @@ def cache_data_key(identifier: bytes, salt: MetadataPointer):
"""
hash_object = hashlib.new("sha256")
hash_object.update(identifier)
hash_object.update(salt.value.encode(encoding="utf-8"))
hash_object.update(salt.encode(encoding="utf-8"))
return hash_object.digest().hex()
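The two sides of the cache_data_key hunk differ only in whether the salt is a plain string such as ':cic.balances' or a MetadataPointer enum member whose `.value` is that string; either way, the key is the sha256 of the raw identifier bytes followed by the encoded salt. A self-contained sketch of the string-salt form (the example address is an illustrative value):

```python
# Self-contained sketch of the key derivation used by cache_data_key.
import hashlib

def cache_data_key(identifier: bytes, salt: str) -> str:
    hash_object = hashlib.new('sha256')
    hash_object.update(identifier)
    hash_object.update(salt.encode('utf-8'))
    return hash_object.digest().hex()

identifier = bytes.fromhex('deadbeef' * 5)  # a 20-byte address, example value only
key = cache_data_key(identifier, ':cic.balances')
print(key)
```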
@@ -3,7 +3,6 @@ import json

# external imports
from cic_eth.api import Api
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.metadata import get_cached_preferred_language, parse_account_metadata
@@ -110,7 +109,7 @@ class Account(SessionBase):
:rtype: str
"""
identifier = bytes.fromhex(self.blockchain_address)
key = cache_data_key(identifier, MetadataPointer.PERSON)
key = cache_data_key(identifier, ':cic.person')
account_metadata = get_cached_data(key)
if not account_metadata:
return self.phone_number

@@ -5,7 +5,6 @@ from datetime import datetime, timedelta

# external imports
import i18n.config
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.balance import (calculate_available_balance,
@@ -164,7 +163,7 @@ class MenuProcessor:
token_symbol = get_default_token_symbol()
blockchain_address = self.account.blockchain_address
balances = get_balances(blockchain_address, chain_str, token_symbol, False)[0]
key = cache_data_key(self.identifier, MetadataPointer.BALANCES)
key = cache_data_key(self.identifier, ':cic.balances')
cache_data(key, json.dumps(balances))
available_balance = calculate_available_balance(balances)
now = datetime.now()
@@ -174,7 +173,7 @@ class MenuProcessor:
else:
timestamp = int((now - timedelta(30)).timestamp())
adjusted_balance = get_adjusted_balance(to_wei(int(available_balance)), chain_str, timestamp, token_symbol)
key = cache_data_key(self.identifier, MetadataPointer.BALANCES_ADJUSTED)
key = cache_data_key(self.identifier, ':cic.adjusted_balance')
cache_data(key, json.dumps(adjusted_balance))

query_statement(blockchain_address)

@@ -10,14 +10,14 @@ import i18n
import redis
from chainlib.chain import ChainSpec
from confini import Config
from cic_types.ext.metadata import Metadata
from cic_types.ext.metadata.signer import Signer

# local imports
from cic_ussd.account.chain import Chain
from cic_ussd.cache import Cache
from cic_ussd.db import dsn_from_config
from cic_ussd.db.models.base import SessionBase
from cic_ussd.metadata.signer import Signer
from cic_ussd.metadata.base import Metadata
from cic_ussd.phone_number import Support
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
from cic_ussd.validator import validate_presence
@@ -87,8 +87,11 @@ Signer.key_file_path = key_file_path
i18n.load_path.append(config.get('LOCALE_PATH'))
i18n.set('fallback', config.get('LOCALE_FALLBACK'))

chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))

chain_spec = ChainSpec(
common_name=config.get('CIC_COMMON_NAME'),
engine=config.get('CIC_ENGINE'),
network_id=config.get('CIC_NETWORK_ID')
)

Chain.spec = chain_spec
Support.phone_number = config.get('OFFICE_SUPPORT_PHONE')
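This hunk swaps between two ways of building the chain spec: one side parses a single CHAIN_SPEC string, the other assembles a ChainSpec from separate CIC_ENGINE, CIC_NETWORK_ID and CIC_COMMON_NAME settings (matching the [cic] config sections further down). A minimal sketch of the single-string form, using the spec string that appears in the test config; the keyword-argument form is shown only in a comment because its exact constructor signature depends on the chainlib version in use.

```python
# Minimal sketch of parsing a chain spec string, formatted as
# engine:fork:network_id:common_name; the example value comes from the
# test config in this diff.
from chainlib.chain import ChainSpec

chain_spec = ChainSpec.from_chain_str('evm:foo:1:bar')
print(str(chain_spec))  # round-trips back to the spec string

# The other side of the hunk assembles the same object from config values,
# roughly: ChainSpec(engine='evm', network_id=1, common_name='bar');
# whether a fork argument is also required depends on the chainlib version.
```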
@@ -12,9 +12,6 @@ import i18n
import redis
from chainlib.chain import ChainSpec
from confini import Config
from cic_types.condiments import MetadataPointer
from cic_types.ext.metadata import Metadata
from cic_types.ext.metadata.signer import Signer

# local imports
from cic_ussd.account.chain import Chain
@@ -28,6 +25,8 @@ from cic_ussd.files.local_files import create_local_file_data_stores, json_file_
from cic_ussd.http.requests import get_request_endpoint, get_request_method
from cic_ussd.http.responses import with_content_headers
from cic_ussd.menu.ussd_menu import UssdMenu
from cic_ussd.metadata.base import Metadata
from cic_ussd.metadata.signer import Signer
from cic_ussd.phone_number import process_phone_number, Support, E164Format
from cic_ussd.processor.ussd import handle_menu_operations
from cic_ussd.runnable.server_base import exportable_parser, logg
@@ -97,7 +96,11 @@ celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY
states = json_file_parser(filepath=config.get('MACHINE_STATES'))
transitions = json_file_parser(filepath=config.get('MACHINE_TRANSITIONS'))

chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
chain_spec = ChainSpec(
common_name=config.get('CIC_COMMON_NAME'),
engine=config.get('CIC_ENGINE'),
network_id=config.get('CIC_NETWORK_ID')
)

Chain.spec = chain_spec
UssdStateMachine.states = states
@@ -110,7 +113,7 @@ default_token_data = query_default_token(chain_str)

# cache default token for re-usability
if default_token_data:
cache_key = cache_data_key(chain_str.encode('utf-8'), MetadataPointer.TOKEN_DEFAULT)
cache_key = cache_data_key(chain_str.encode('utf-8'), ':cic.default_token_data')
cache_data(key=cache_key, data=json.dumps(default_token_data))
else:
raise InitializationError(f'Default token data for: {chain_str} not found.')

@@ -3,10 +3,8 @@ import json
import logging
from datetime import timedelta

# external imports
# third-party imports
import celery
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.balance import get_balances, calculate_available_balance
@@ -89,7 +87,7 @@ def balances_callback(result: list, param: str, status_code: int):

balances = result[0]
identifier = bytes.fromhex(param)
key = cache_data_key(identifier, MetadataPointer.BALANCES)
key = cache_data_key(identifier, ':cic.balances')
cache_data(key, json.dumps(balances))

@@ -2,10 +2,9 @@
import json
import logging

# external imports
# third-party imports
import celery
import i18n
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.metadata import get_cached_preferred_language
@@ -50,7 +49,7 @@ def cache_statement(parsed_transaction: dict, querying_party: str):
statement_transactions.append(parsed_transaction)
data = json.dumps(statement_transactions)
identifier = bytes.fromhex(querying_party)
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
key = cache_data_key(identifier, ':cic.statement')
cache_data(key, data)

@@ -1,7 +1,7 @@
# standard imports
import semver

version = (0, 3, 1, 'alpha.6')
version = (0, 3, 1, 'alpha.5')

version_object = semver.VersionInfo(
major=version[0],

@@ -1,2 +0,0 @@
[chain]
spec =
@@ -1,2 +1,5 @@
[cic]
engine = evm
common_name = bloxberg
network_id = 8996
meta_url = http://localhost:63380

@@ -1,2 +0,0 @@
[chain]
spec = 'evm:foo:1:bar'
@@ -1,2 +1,5 @@
[cic]
engine = evm
common_name = bloxberg
network_id = 8996
meta_url = http://test-meta.io
@@ -1,7 +1,5 @@
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"

FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2

# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
RUN apt-get install -y redis-server
# create secrets directory
RUN mkdir -vp pgp/keys
@@ -10,34 +8,31 @@ RUN mkdir -vp pgp/keys
RUN mkdir -vp cic-ussd
RUN mkdir -vp data

ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"

RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a7
pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY \
--extra-index-url $EXTRA_INDEX_URL \
cic-eth-aux-erc20-demurrage-token~=0.0.2a6

COPY requirements.txt .

COPY *requirements.txt ./
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt

pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
-r requirements.txt

COPY . .
RUN python setup.py install

COPY cic_ussd/db/ussd_menu.json data/

COPY docker/*.sh ./
COPY docker/*.sh .
RUN chmod +x /root/*.sh

## copy config and migration files to definitive file so they can be referenced in path definitions for running scripts
# copy config and migration files to definitive file so they can be referenced in path definitions for running scripts
COPY config/ /usr/local/etc/cic-ussd/
COPY cic_ussd/db/migrations/ /usr/local/share/cic-ussd/alembic

@@ -4,9 +4,9 @@ billiard==3.6.4.0
bcrypt==3.2.0
celery==4.4.7
cffi==1.14.6
cic-eth~=0.12.5a1
cic-notify~=0.4.0a11
cic-types~=0.2.1a2
cic-eth[services]~=0.12.4a11
cic-notify~=0.4.0a10
cic-types~=0.2.0a3
confini>=0.3.6rc4,<0.5.0
phonenumbers==8.12.12
psycopg2==2.8.6

@@ -1,4 +1,3 @@
cic-eth[services]~=0.12.4a13
Faker==8.1.2
faker-e164==0.1.0
pytest==6.2.4
@@ -4,7 +4,8 @@ import time

# external imports
import pytest
from cic_types.condiments import MetadataPointer
import requests_mock
from chainlib.hash import strip_0x

# local imports
from cic_ussd.account.statement import (filter_statement_transactions,
@@ -47,7 +48,7 @@ def test_generate(activated_account,
generate(querying_party, None, sender_transaction)
time.sleep(2)
identifier = bytes.fromhex(activated_account.blockchain_address)
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
key = cache_data_key(identifier, ':cic.statement')
statement = get_cached_data(key)
statement = json.loads(statement)
assert len(statement) == 1

@@ -3,7 +3,7 @@ import json
import datetime

# external imports
from cic_types.condiments import MetadataPointer
from chainlib.hash import strip_0x

# local imports
from cic_ussd.account.balance import get_cached_available_balance
@@ -58,7 +58,7 @@ def test_menu_processor(activated_account,
token_symbol=token_symbol)

identifier = bytes.fromhex(activated_account.blockchain_address)
key = cache_data_key(identifier, MetadataPointer.BALANCES_ADJUSTED)
key = cache_data_key(identifier, ':cic.adjusted_balance')
adjusted_balance = 45931650.64654012
cache_data(key, json.dumps(adjusted_balance))
resp = response(activated_account, 'ussd.kenya.account_balances', name, init_database, generic_ussd_session)

@@ -7,7 +7,6 @@ import time
import i18n
import requests_mock
from chainlib.hash import strip_0x
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.chain import Chain
@@ -46,7 +45,7 @@ def test_handle_menu(activated_account,
ussd_menu = UssdMenu.find_by_name('initial_language_selection')
assert menu_resp.get('name') == ussd_menu.get('name')
identifier = bytes.fromhex(strip_0x(pending_account.blockchain_address))
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
key = cache_data_key(identifier, ':cic.preferences')
cache_data(key, json.dumps(preferences))
time.sleep(2)
menu_resp = handle_menu(pending_account, init_database)

@@ -1,18 +1,20 @@
# standard imports
import json
from decimal import Decimal

# external imports
import celery
import pytest
import requests_mock
from chainlib.hash import strip_0x
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.statement import filter_statement_transactions
from cic_ussd.account.statement import generate, filter_statement_transactions
from cic_ussd.account.transaction import transaction_actors
from cic_ussd.cache import cache_data_key, get_cached_data
from cic_ussd.db.models.account import Account
from cic_ussd.error import AccountCreationDataNotFound
from cic_ussd.metadata import PreferencesMetadata


# test imports
@@ -87,7 +89,7 @@ def test_balances_callback(activated_account, balances, celery_session_worker):
[balances, activated_account.blockchain_address, status_code])
s_balances_callback.apply_async().get()
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
key = cache_data_key(identifier, MetadataPointer.BALANCES)
key = cache_data_key(identifier, ':cic.balances')
cached_balances = get_cached_data(key)
cached_balances = json.loads(cached_balances)
assert cached_balances == balances[0]

@@ -1,11 +1,11 @@
# standard imports
import json
import os

# external imports
import celery
import requests_mock
from chainlib.hash import strip_0x
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.cache import cache_data_key, get_cached_data
@@ -27,7 +27,7 @@ def test_query_person_metadata(activated_account,
s_query_person_metadata = celery.signature(
'cic_ussd.tasks.metadata.query_person_metadata', [activated_account.blockchain_address])
s_query_person_metadata.apply().get()
key = cache_data_key(identifier, MetadataPointer.PERSON)
key = cache_data_key(identifier, ':cic.person')
cached_person_metadata = get_cached_data(key)
cached_person_metadata = json.loads(cached_person_metadata)
assert cached_person_metadata == person_metadata
@@ -46,7 +46,7 @@ def test_query_preferences_metadata(activated_account,
query_preferences_metadata = celery.signature(
'cic_ussd.tasks.metadata.query_preferences_metadata', [activated_account.blockchain_address])
query_preferences_metadata.apply().get()
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
key = cache_data_key(identifier, ':cic.preferences')
cached_preferences_metadata = get_cached_data(key)
cached_preferences_metadata = json.loads(cached_preferences_metadata)
assert cached_preferences_metadata == preferences

@@ -4,7 +4,6 @@ import json
# external imports
import celery
from chainlib.hash import strip_0x
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.transaction import transaction_actors
@@ -39,7 +38,7 @@ def test_cache_statement(activated_account,
transaction_result):
recipient_transaction, sender_transaction = transaction_actors(transaction_result)
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
key = cache_data_key(identifier, ':cic.statement')
cached_statement = get_cached_data(key)
assert cached_statement is None
s_parse_transaction = celery.signature(

@@ -3,7 +3,6 @@ import hashlib
import json

# external imports
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
@@ -13,7 +12,7 @@ from cic_ussd.cache import cache_data, cache_data_key, get_cached_data

def test_cache_data(init_cache):
identifier = 'some_key'.encode()
key = cache_data_key(identifier, MetadataPointer.PERSON)
key = cache_data_key(identifier, ':testing')
assert get_cached_data(key) is None
cache_data(key, json.dumps('some_value'))
assert get_cached_data(key) is not None
@@ -21,10 +20,10 @@ def test_cache_data(init_cache):

def test_cache_data_key():
identifier = 'some_key'.encode()
key = cache_data_key(identifier, MetadataPointer.PERSON)
key = cache_data_key(identifier, ':testing')
hash_object = hashlib.new("sha256")
hash_object.update(identifier)
hash_object.update(':cic.person'.encode(encoding="utf-8"))
hash_object.update(':testing'.encode(encoding="utf-8"))
assert hash_object.digest().hex() == key
apps/cic-ussd/tests/fixtures/account.py (vendored, 12 lines changed)
@@ -4,7 +4,7 @@ import random

# external accounts
import pytest
from cic_types.condiments import MetadataPointer
from chainlib.hash import strip_0x

# local imports
from cic_ussd.account.chain import Chain
@@ -56,7 +56,7 @@ def cache_account_creation_data(init_cache, account_creation_data):
def cache_balances(activated_account, balances, init_cache):
identifier = bytes.fromhex(activated_account.blockchain_address)
balances = json.dumps(balances[0])
key = cache_data_key(identifier, MetadataPointer.BALANCES)
key = cache_data_key(identifier, ':cic.balances')
cache_data(key, balances)

@@ -64,7 +64,7 @@ def cache_balances(activated_account, balances, init_cache):
def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
chain_str = Chain.spec.__str__()
data = json.dumps(default_token_data)
key = cache_data_key(chain_str.encode('utf-8'), MetadataPointer.TOKEN_DEFAULT)
key = cache_data_key(chain_str.encode('utf-8'), ':cic.default_token_data')
cache_data(key, data)

@@ -72,7 +72,7 @@ def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
def cache_person_metadata(activated_account, init_cache, person_metadata):
identifier = bytes.fromhex(activated_account.blockchain_address)
person = json.dumps(person_metadata)
key = cache_data_key(identifier, MetadataPointer.PERSON)
key = cache_data_key(identifier, ':cic.person')
cache_data(key, person)

@@ -80,7 +80,7 @@ def cache_person_metadata(activated_account, init_cache, person_metadata):
def cache_preferences(activated_account, init_cache, preferences):
identifier = bytes.fromhex(activated_account.blockchain_address)
preferences = json.dumps(preferences)
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
key = cache_data_key(identifier, ':cic.preferences')
cache_data(key, preferences)

@@ -88,7 +88,7 @@ def cache_preferences(activated_account, init_cache, preferences):
def cache_statement(activated_account, init_cache, statement):
identifier = bytes.fromhex(activated_account.blockchain_address)
statement = json.dumps(statement)
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
key = cache_data_key(identifier, ':cic.statement')
cache_data(key, statement)
apps/cic-ussd/tests/fixtures/config.py (vendored, 6 lines changed)
@@ -41,7 +41,11 @@ def init_state_machine(load_config):

@pytest.fixture(scope='function')
def load_chain_spec(load_config):
chain_spec = ChainSpec.from_chain_str(load_config.get('CHAIN_SPEC'))
chain_spec = ChainSpec(
common_name=load_config.get('CIC_COMMON_NAME'),
engine=load_config.get('CIC_ENGINE'),
network_id=load_config.get('CIC_NETWORK_ID')
)
Chain.spec = chain_spec

@@ -1,30 +0,0 @@
#!/bin/bash

. util.sh

set -a

. ${DEV_DATA_DIR}/env_reset

set -e

if [ ! -z $DEV_FEE_PRICE ]; then
gas_price_arg="--gas-price $DEV_FEE_PRICE"
fee_price_arg="--fee-price $DEV_FEE_PRICE"
fi

must_eth_rpc

# Deploy address declarator registry
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mDeploy address declarator contract\033[;39m"
DEV_ADDRESS_DECLARATOR=`eth-address-declarator-deploy --nonce $nonce -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG $DEV_DECLARATOR_DESCRIPTION`

check_wait 1

echo -e "\033[;96mWriting env_reset file\033[;39m"
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset

set +a
set +e
@@ -1,76 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
. util.sh
|
||||
|
||||
set -a
|
||||
|
||||
. ${DEV_DATA_DIR}/env_reset
|
||||
|
||||
WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
|
||||
|
||||
set -e
|
||||
|
||||
must_address "$DEV_ADDRESS_DECLARATOR" "address declarator"
|
||||
must_eth_rpc
|
||||
|
||||
if [ ! -z $DEV_FEE_PRICE ]; then
|
||||
gas_price_arg="--gas-price $DEV_FEE_PRICE"
|
||||
fee_price_arg="--fee-price $DEV_FEE_PRICE"
|
||||
fi
|
||||
|
||||
|
||||
# Deploy contract registry contract
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mDeploy contract registry contract\033[;39m"
|
||||
CIC_REGISTRY_ADDRESS=`okota-contract-registry-deploy --nonce $nonce $fee_price_arg -i $CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry --identifier DefaultToken --address-declarator $DEV_ADDRESS_DECLARATOR -p $RPC_PROVIDER $DEV_DEBUG_FLAG -s -u -w`
|
||||
|
||||
|
||||
>&2 echo -e "\033[;96mAdd contract registry record to itself\033[;39m"
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier ContractRegistry $CIC_REGISTRY_ADDRESS`
|
||||
add_pending_tx_hash $r
|
||||
|
||||
|
||||
>&2 echo -e "\033[;96mAdd address declarator record to contract registry\033[;39m"
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier AddressDeclarator $DEV_ADDRESS_DECLARATOR`
|
||||
add_pending_tx_hash $r
|
||||
|
||||
|
||||
# Deploy transfer authorization contact
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mDeploy transfer authorization contract\033[;39m"
|
||||
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy --nonce $nonce -w $gas_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG`
|
||||
|
||||
|
||||
>&2 echo -e "\033[;96mAdd transfer authorization record to contract registry\033[;39m"
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS`
|
||||
add_pending_tx_hash $r
|
||||
|
||||
|
||||
# Deploy token index contract
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mDeploy token symbol index contract\033[;39m"
|
||||
DEV_TOKEN_INDEX_ADDRESS=`okota-token-index-deploy --nonce $nonce -s -w -u $fee_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --address-declarator $DEV_ADDRESS_DECLARATOR`
|
||||
|
||||
>&2 echo -e "\033[;96mAdd token symbol index record to contract registry\033[;39m"
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS`
|
||||
add_pending_tx_hash $r
|
||||
|
||||
check_wait 2
|
||||
|
||||
echo -e "\033[;96mWriting env_reset file\033[;39m"
|
||||
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset
|
||||
|
||||
|
||||
set +a
|
||||
set +e
|
||||
@@ -1,161 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
. util.sh
|
||||
|
||||
set -a
|
||||
|
||||
. ${DEV_DATA_DIR}/env_reset
|
||||
|
||||
WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
|
||||
|
||||
set -e
|
||||
|
||||
if [ ! -z $DEV_FEE_PRICE ]; then
|
||||
gas_price_arg="--gas-price $DEV_FEE_PRICE"
|
||||
fee_price_arg="--fee-price $DEV_FEE_PRICE"
|
||||
fi
|
||||
|
||||
have_default_token=1
|
||||
token_feedback_display_string='token'
|
||||
|
||||
must_address "$DEV_ADDRESS_DECLARATOR" "address declarator"
|
||||
must_address "$CIC_REGISTRY_ADDRESS" "registry"
|
||||
must_eth_rpc
|
||||
|
||||
|
||||
function _deploy_token_defaults {
|
||||
if [ -z "$TOKEN_SYMBOL" ]; then
|
||||
>&2 echo -e "\033[;33mToken symbol not set, setting defaults for type $TOKEN_TYPE\033[;39m"
|
||||
TOKEN_SYMBOL=$1
|
||||
TOKEN_NAME=$2
|
||||
elif [ -z "$TOKEN_NAME" ]; then
|
||||
>&2 echo -e "\033[;33mToken name not set, setting same as symbol for type $TOKEN_TYPE\033[;39m"
|
||||
TOKEN_NAME=$TOKEN_SYMBOL
|
||||
fi
|
||||
TOKEN_DECIMALS=${TOKEN_DECIMALS:-6}
|
||||
|
||||
debug_rpc
|
||||
default_token_registered=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw DefaultToken --fee-limit 8000000`
|
||||
if [ $default_token_registered == '0000000000000000000000000000000000000000' ]; then
|
||||
>&2 echo -e "\033[;33mFound no existing default token in token registry"
|
||||
have_default_token=''
|
||||
token_feedback_display_string='default token'
|
||||
fi
|
||||
>&2 echo -e "\033[;96mdeploying $token_feedback_display_string ..."
|
||||
>&2 echo -e "Type: $TOKEN_TYPE"
|
||||
>&2 echo -e "Name: $TOKEN_NAME"
|
||||
>&2 echo -e "Symbol: $TOKEN_SYMBOL"
|
||||
>&2 echo -e "Decimals: $TOKEN_DECIMALS\033[;39m"
|
||||
|
||||
}
|
||||
|
||||
function deploy_token_giftable_erc20_token() {
|
||||
_deploy_token_defaults "GFT" "Giftable Token"
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
TOKEN_ADDRESS=`giftable-token-deploy --nonce $nonce $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals $TOKEN_DECIMALS $DEV_DEBUG_FLAG`
|
||||
}
|
||||
|
||||
|
||||
function deploy_token_erc20_demurrage_token() {
|
||||
_deploy_token_defaults "DET" "Demurrage Token"
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
TOKEN_ADDRESS=`erc20-demurrage-token-deploy --nonce $nonce $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL $DEV_DEBUG_FLAG -ww -s`
|
||||
}
|
||||
|
||||
function deploy_accounts_index() {
|
||||
# Deploy accounts index contact
|
||||
>&2 echo -e "\033[;96mDeploy accounts index contract for token $TOKEN_SYMBOL\033[;39m"
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
DEV_ACCOUNTS_INDEX_ADDRESS=`okota-accounts-index-deploy --nonce $nonce $fee_price_arg -u -s -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --address-declarator $DEV_ADDRESS_DECLARATOR --token-address $1`
|
||||
|
||||
if [ -z "$have_default_token" ]; then
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mAdd acccounts index record for default token to contract registry\033[;39m"
|
||||
r=`eth-contract-registry-set --nonce $nonce $DEV_WAIT_FLAG $fee_price_arg -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier AccountRegistry $DEV_ACCOUNTS_INDEX_ADDRESS`
|
||||
add_pending_tx_hash $r
|
||||
fi
|
||||
}
|
||||
|
||||
function deploy_minter_faucet() {
|
||||
FAUCET_AMOUNT=${FAUCET_AMOUNT:-0}
|
||||
|
||||
# Token faucet contract
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mDeploy token faucet contract for token $TOKEN_SYMBOL\033[;39m"
|
||||
accounts_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw AccountRegistry --fee-limit 8000000`
|
||||
faucet_address=`sarafu-faucet-deploy --nonce $nonce $fee_price_arg -s -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --account-index-address $accounts_index_address $1`
|
||||
|
||||
# sarafu-faucet-deploy consumes TWO nonces
|
||||
advance_nonce
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mSet token faucet amount to $FAUCET_AMOUNT\033[;39m"
|
||||
r=`sarafu-faucet-set --nonce $nonce $fee_price_arg $DEV_WAIT_FLAG -s -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $faucet_address $DEV_DEBUG_FLAG --fee-limit 100000 $FAUCET_AMOUNT`
|
||||
add_pending_tx_hash $r
|
||||
|
||||
if [ -z $have_default_token ]; then
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mRegister faucet in registry\033[;39m"
|
||||
r=`eth-contract-registry-set --nonce $nonce $DEV_WAIT_FLAG -s -u $fee_price_arg -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier Faucet $faucet_address`
|
||||
add_pending_tx_hash $r
|
||||
fi
|
||||
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mSet faucet as token minter\033[;39m"
|
||||
r=`giftable-token-minter $DEV_WAIT_FLAG --nonce $nonce -s -u $fee_price_arg -y $WALLET_KEY_FILE -e $TOKEN_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG $faucet_address`
|
||||
add_pending_tx_hash $r
|
||||
}
|
||||
|
||||
|
||||
TOKEN_TYPE=${TOKEN_TYPE:-giftable_erc20_token}
|
||||
deploy_token_${TOKEN_TYPE}
|
||||
|
||||
if [ -z "$have_default_token" ]; then
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mAdd default token to contract registry\033[;39m"
|
||||
r=`eth-contract-registry-set $DEV_WAIT_FLAG --nonce $nonce $fee_price_arg -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier DefaultToken $TOKEN_ADDRESS`
|
||||
add_pending_tx_hash $r
|
||||
fi
|
||||
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mAdd token symbol $TOKEN_SYMBOL to token address $TOKEN_ADDRESS mapping to token index\033[;39m"
|
||||
token_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw TokenRegistry`
|
||||
r=`eth-token-index-add --nonce $nonce $fee_price_arg -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG -e $token_index_address $TOKEN_ADDRESS`
|
||||
add_pending_tx_hash $r
|
||||
|
||||
|
||||
TOKEN_MINT_AMOUNT=${TOKEN_MINT_AMOUNT:-${DEV_TOKEN_MINT_AMOUNT}}
|
||||
advance_nonce
|
||||
debug_rpc
|
||||
>&2 echo -e "\033[;96mMinting $TOKEN_MINT_AMOUNT tokens\033[;39m"
|
||||
r=`giftable-token-gift $DEV_WAIT_FLAG --nonce $nonce $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC -u $DEV_DEBUG_FLAG -s -e $TOKEN_ADDRESS "$DEV_TOKEN_MINT_AMOUNT"`
|
||||
add_pending_tx_hash $r
|
||||
|
||||
|
||||
# Create accounts index for default token
|
||||
deploy_accounts_index $TOKEN_ADDRESS
|
||||
|
||||
# Connect a minter component if defined
|
||||
TOKEN_MINTER_MODE=${TOKEN_MINTER_MODE:-"faucet"}
|
||||
if [ -z "$TOKEN_MINTER_MODE" ]; then
|
||||
>&2 echo -e "\033[;33mNo token minter mode set.\033[;39m"
|
||||
else
|
||||
deploy_minter_${TOKEN_MINTER_MODE} $TOKEN_ADDRESS
|
||||
fi
|
||||
|
||||
check_wait 3
|
||||
|
||||
>&2 echo -e "\033[;96mWriting env_reset file\033[;39m"
|
||||
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset
|
||||
|
||||
set +e
|
||||
set +a
|
||||
Some files were not shown because too many files have changed in this diff.