commit 7ce68021bd
Merge remote-tracking branch 'origin/master' into lash/verify-cache
apps/cic-base-os/aux/wait-for-it/.gitignore (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
**/*.pyc
.pydevproject
/vendor/
apps/cic-base-os/aux/wait-for-it/.travis.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
language: python
python:
- "2.7"

script:
- python test/wait-for-it.py
apps/cic-base-os/aux/wait-for-it/LICENSE (new file, 20 lines)
@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2016 Giles Hall

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
apps/cic-base-os/aux/wait-for-it/README.md (new file, 75 lines)
@@ -0,0 +1,75 @@
# wait-for-it

`wait-for-it.sh` is a pure bash script that will wait on the availability of a
host and TCP port. It is useful for synchronizing the spin-up of
interdependent services, such as linked docker containers. Since it is a pure
bash script, it does not have any external dependencies.

## Usage

```text
wait-for-it.sh host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST       Host or IP under test
-p PORT | --port=PORT       TCP port under test
                            Alternatively, you specify the host and port as host:port
-s | --strict               Only execute subcommand if the test succeeds
-q | --quiet                Don't output any status messages
-t TIMEOUT | --timeout=TIMEOUT
                            Timeout in seconds, zero for no timeout
-- COMMAND ARGS             Execute command with args after the test finishes
```

## Examples

For example, let's test to see if we can access port 80 on `www.google.com`,
and if it is available, echo the message `google is up`.

```text
$ ./wait-for-it.sh www.google.com:80 -- echo "google is up"
wait-for-it.sh: waiting 15 seconds for www.google.com:80
wait-for-it.sh: www.google.com:80 is available after 0 seconds
google is up
```

You can set your own timeout with the `-t` or `--timeout=` option. Setting
the timeout value to 0 will disable the timeout:

```text
$ ./wait-for-it.sh -t 0 www.google.com:80 -- echo "google is up"
wait-for-it.sh: waiting for www.google.com:80 without a timeout
wait-for-it.sh: www.google.com:80 is available after 0 seconds
google is up
```

The subcommand will be executed regardless if the service is up or not. If you
wish to execute the subcommand only if the service is up, add the `--strict`
argument. In this example, we will test port 81 on `www.google.com` which will
fail:

```text
$ ./wait-for-it.sh www.google.com:81 --timeout=1 --strict -- echo "google is up"
wait-for-it.sh: waiting 1 seconds for www.google.com:81
wait-for-it.sh: timeout occurred after waiting 1 seconds for www.google.com:81
wait-for-it.sh: strict mode, refusing to execute subprocess
```

If you don't want to execute a subcommand, leave off the `--` argument. This
way, you can test the exit condition of `wait-for-it.sh` in your own scripts,
and determine how to proceed:

```text
$ ./wait-for-it.sh www.google.com:80
wait-for-it.sh: waiting 15 seconds for www.google.com:80
wait-for-it.sh: www.google.com:80 is available after 0 seconds
$ echo $?
0
$ ./wait-for-it.sh www.google.com:81
wait-for-it.sh: waiting 15 seconds for www.google.com:81
wait-for-it.sh: timeout occurred after waiting 15 seconds for www.google.com:81
$ echo $?
124
```

## Community

*Debian*: There is a [Debian package](https://tracker.debian.org/pkg/wait-for-it).
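The README usage above is shell-oriented. As an illustrative sketch only (not part of this commit), the same check can be driven from a Python deployment or test script; the script path and the db:5432 endpoint below are assumed example values.

# Illustrative sketch (not part of this commit): invoking the vendored script
# from Python and branching on its exit status, as described in the README.
import subprocess

result = subprocess.run(['./wait-for-it.sh', 'db:5432', '-t', '30'])
if result.returncode == 0:
    print('endpoint is reachable, continuing')
else:
    print('gave up waiting for the endpoint')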
apps/cic-base-os/aux/wait-for-it/wait-for-it.sh (executable, new file, 182 lines)
@@ -0,0 +1,182 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available

WAITFORIT_cmdname=${0##*/}

echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }

usage()
{
    cat << USAGE >&2
Usage:
    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
    -h HOST | --host=HOST       Host or IP under test
    -p PORT | --port=PORT       TCP port under test
                                Alternatively, you specify the host and port as host:port
    -s | --strict               Only execute subcommand if the test succeeds
    -q | --quiet                Don't output any status messages
    -t TIMEOUT | --timeout=TIMEOUT
                                Timeout in seconds, zero for no timeout
    -- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}

wait_for()
{
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}

wait_for_wrapper()
{
    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}

# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        WAITFORIT_hostport=(${1//:/ })
        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
        shift 1
        ;;
        --child)
        WAITFORIT_CHILD=1
        shift 1
        ;;
        -q | --quiet)
        WAITFORIT_QUIET=1
        shift 1
        ;;
        -s | --strict)
        WAITFORIT_STRICT=1
        shift 1
        ;;
        -h)
        WAITFORIT_HOST="$2"
        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        WAITFORIT_HOST="${1#*=}"
        shift 1
        ;;
        -p)
        WAITFORIT_PORT="$2"
        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        WAITFORIT_PORT="${1#*=}"
        shift 1
        ;;
        -t)
        WAITFORIT_TIMEOUT="$2"
        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        WAITFORIT_TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        shift
        WAITFORIT_CLI=("$@")
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done

if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi

WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}

# Check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)

WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
    WAITFORIT_ISBUSY=1
    # Check if busybox timeout uses -t flag
    # (recent Alpine versions don't support -t anymore)
    if timeout &>/dev/stdout | grep -q -e '-t '; then
        WAITFORIT_BUSYTIMEFLAG="-t"
    fi
else
    WAITFORIT_ISBUSY=0
fi

if [[ $WAITFORIT_CHILD -gt 0 ]]; then
    wait_for
    WAITFORIT_RESULT=$?
    exit $WAITFORIT_RESULT
else
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        WAITFORIT_RESULT=$?
    else
        wait_for
        WAITFORIT_RESULT=$?
    fi
fi

if [[ $WAITFORIT_CLI != "" ]]; then
    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
        exit $WAITFORIT_RESULT
    fi
    exec "${WAITFORIT_CLI[@]}"
else
    exit $WAITFORIT_RESULT
fi
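For readers following the script, the core of wait_for() is a probe-and-retry loop over a TCP connect. The following is a minimal Python sketch of the same idea; it is illustrative only and not part of the commit.

# Illustrative sketch (not part of this commit): the probe-and-retry idea that
# wait_for() implements, expressed with Python's standard socket module.
import socket
import time

def wait_for_port(host, port, timeout=15):
    # Return True once host:port accepts TCP connections, False on timeout.
    # A timeout of 0 means wait indefinitely, mirroring wait-for-it.sh.
    deadline = time.monotonic() + timeout if timeout > 0 else None
    while True:
        try:
            with socket.create_connection((host, port), timeout=1):
                return True
        except OSError:
            if deadline is not None and time.monotonic() >= deadline:
                return False
            time.sleep(1)

# Example (assumed values): block until a local postgres container is reachable.
# wait_for_port('localhost', 5432, timeout=30)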
apps/cic-cache/aux/wait-for-it/.gitignore (vendored, new file, 3 lines)
apps/cic-cache/aux/wait-for-it/.travis.yml (new file, 7 lines)
apps/cic-cache/aux/wait-for-it/LICENSE (new file, 20 lines)
apps/cic-cache/aux/wait-for-it/README.md (new file, 75 lines)
apps/cic-cache/aux/wait-for-it/wait-for-it.sh (executable, new file, 182 lines)
The five files above are verbatim copies of the wait-for-it files added under apps/cic-base-os/aux/wait-for-it/ listed earlier.
@@ -8,6 +8,7 @@ import base64
 import confini
 
 # local imports
+import cic_cache.cli
 from cic_cache.db import dsn_from_config
 from cic_cache.db.models.base import SessionBase
 from cic_cache.runnable.daemons.query import (
@@ -23,7 +24,7 @@ logg = logging.getLogger()
 
 
 arg_flags = cic_cache.cli.argflag_std_read
-local_arg_flags = cic_cache.cli.argflag_local_sync
+local_arg_flags = cic_cache.cli.argflag_local_sync | cic_cache.cli.argflag_local_task
 argparser = cic_cache.cli.ArgumentParser(arg_flags)
 argparser.process_local_flags(local_arg_flags)
 args = argparser.parse_args()
@@ -31,6 +32,7 @@ args = argparser.parse_args()
 # process config
 config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
 
+# connect to database
 dsn = dsn_from_config(config)
 SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
 
@@ -9,6 +9,7 @@ import celery
 import confini
 
 # local imports
+import cic_cache.cli
 from cic_cache.db import dsn_from_config
 from cic_cache.db.models.base import SessionBase
 from cic_cache.tasks.tx import *
@@ -16,35 +17,20 @@ from cic_cache.tasks.tx import *
 logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger()
 
-config_dir = os.path.join('/usr/local/etc/cic-cache')
-argparser = argparse.ArgumentParser()
-argparser.add_argument('-c', type=str, default=config_dir, help='config file')
-argparser.add_argument('-q', type=str, default='cic-cache', help='queue name for worker tasks')
-argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
-argparser.add_argument('-v', action='store_true', help='be verbose')
-argparser.add_argument('-vv', action='store_true', help='be more verbose')
+# process args
+arg_flags = cic_cache.cli.argflag_std_base
+local_arg_flags = cic_cache.cli.argflag_local_task
+argparser = cic_cache.cli.ArgumentParser(arg_flags)
+argparser.process_local_flags(local_arg_flags)
 
 args = argparser.parse_args()
 
-if args.vv:
-    logging.getLogger().setLevel(logging.DEBUG)
-elif args.v:
-    logging.getLogger().setLevel(logging.INFO)
-
-config = confini.Config(args.c, args.env_prefix)
-config.process()
+# process config
+config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
 
 # connect to database
 dsn = dsn_from_config(config)
 SessionBase.connect(dsn)
 
-# verify database connection with minimal sanity query
-#session = SessionBase.create_session()
-#session.execute('select version_num from alembic_version')
-#session.close()
-
 # set up celery
 current_app = celery.Celery(__name__)
 
@@ -87,9 +73,9 @@ def main():
     elif args.v:
         argv.append('--loglevel=INFO')
     argv.append('-Q')
-    argv.append(args.q)
+    argv.append(config.get('CELERY_QUEUE'))
     argv.append('-n')
-    argv.append(args.q)
+    argv.append(config.get('CELERY_QUEUE'))
 
     current_app.worker_main(argv)
 
@@ -40,7 +40,7 @@ logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger()
 
 # process args
-arg_flags = cic_cache.cli.argflag_std_read
+arg_flags = cic_cache.cli.argflag_std_base
 local_arg_flags = cic_cache.cli.argflag_local_sync
 argparser = cic_cache.cli.ArgumentParser(arg_flags)
 argparser.process_local_flags(local_arg_flags)
@@ -1,33 +1,28 @@
-# syntax = docker/dockerfile:1.2
-FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
+ARG DOCKER_REGISTRY=registry.gitlab.com/grassrootseconomics
 
-# RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
+FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e
 
 COPY requirements.txt .
-#RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
-#RUN pip install $pip_extra_index_url_flag .
-#RUN pip install .[server]
 
-ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
-ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
+RUN apt-get install libffi-dev -y
+
+ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
 ARG EXTRA_PIP_ARGS=""
-RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-    pip install --index-url https://pypi.org/simple \
-    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
+ARG PIP_INDEX_URL="https://pypi.org/simple"
+RUN pip install --index-url $PIP_INDEX_URL \
+    --pre \
+    --force-reinstall \
+    --no-cache \
+    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
    -r requirements.txt
 
 COPY . .
 
 RUN python setup.py install
 
-# ini files in config directory defines the configurable parameters for the application
-# they can all be overridden by environment variables
-# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
-#COPY config/ /usr/local/etc/cic-cache/
-COPY config/ .
 
 # for db migrations
-RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
+COPY ./aux/wait-for-it/wait-for-it.sh ./
 COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
 
 COPY /docker/start_tracker.sh ./start_tracker.sh
@@ -1,4 +1,5 @@
 celery==4.4.7
-erc20-demurrage-token~=0.0.3a1
-cic-eth-registry>=0.6.1a2,<0.7.0
-cic-eth[services]~=0.12.4a8
+erc20-demurrage-token~=0.0.5a3
+cic-eth-registry~=0.6.1a6
+chainlib~=0.0.9rc1
+cic_eth~=0.12.4a11
@@ -1,6 +1,6 @@
 [metadata]
 name = cic-eth-aux-erc20-demurrage-token
-version = 0.0.2a6
+version = 0.0.2a7
 description = cic-eth tasks supporting erc20 demurrage token
 author = Louis Holbrook
 author_email = dev@holbrook.no
@@ -1,5 +1,5 @@
 SQLAlchemy==1.3.20
-cic-eth-registry>=0.6.1a3,<0.7.0
+cic-eth-registry>=0.6.1a6,<0.7.0
 hexathon~=0.0.1a8
 chainqueue>=0.0.4a6,<0.1.0
 eth-erc20>=0.1.2a2,<0.2.0
@@ -1,21 +1,2 @@
-# standard imports
-import logging
-
-# external imports
-import celery
-
 # local imports
-from cic_eth.task import BaseTask
-
-celery_app = celery.current_app
-logg = logging.getLogger()
-
-
-@celery_app.task(bind=True, base=BaseTask)
-def default_token(self):
-    return {
-        'symbol': self.default_token_symbol,
-        'address': self.default_token_address,
-        'name': self.default_token_name,
-        'decimals': self.default_token_decimals,
-        }
+from cic_eth.eth.erc20 import default_token
@@ -17,15 +17,50 @@ from cic_eth.enum import LockEnum
 
 app = celery.current_app
 
-logg = logging.getLogger(__name__)
+#logg = logging.getLogger(__name__)
+logg = logging.getLogger()
 
 
 class Api(ApiBase):
 
+    @staticmethod
+    def to_v_list(v, n):
+        """Translate an arbitrary number of string and/or list arguments to a list of list of string arguments
+
+        :param v: Arguments
+        :type v: str or list
+        :param n: Number of elements to generate arguments for
+        :type n: int
+        :rtype: list
+        :returns: list of assembled arguments
+        """
+        if isinstance(v, str):
+            vv = v
+            v = []
+            for i in range(n):
+                v.append([vv])
+        elif not isinstance(v, list):
+            raise ValueError('argument must be single string, or list or strings or lists')
+        else:
+            if len(v) != n:
+                raise ValueError('v argument count must match integer n')
+            for i in range(n):
+                if isinstance(v[i], str):
+                    v[i] = [v[i]]
+                elif not isinstance(v, list):
+                    raise ValueError('proof argument must be single string, or list or strings or lists')
+
+        return v
+
+
     def default_token(self):
+        """Retrieves the default fallback token of the custodial network.
+
+        :returns: uuid of root task
+        :rtype: celery.Task
+        """
         s_token = celery.signature(
-                'cic_eth.admin.token.default_token',
+                'cic_eth.eth.erc20.default_token',
                 [],
                 queue=self.queue,
                 )
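As a short illustration (not part of the commit), the to_v_list helper added above fans a single proof string out to one single-element list per token, and wraps a matching-length list of strings element-wise; the digests below are made-up example values.

# Illustrative only: expected behaviour derived from the to_v_list code above.
assert Api.to_v_list('deadbeef', 2) == [['deadbeef'], ['deadbeef']]
assert Api.to_v_list(['aa', 'bb'], 2) == [['aa'], ['bb']]
# A count mismatch raises ValueError('v argument count must match integer n').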
@@ -35,6 +70,97 @@ class Api(ApiBase):
         return s_token.apply_async()
 
 
+    def token(self, token_symbol, proof=None):
+        """Single-token alias for tokens method.
+
+        See tokens method for details.
+
+        :param token_symbol: Token symbol to look up
+        :type token_symbol: str
+        :param proof: Proofs to add to signature verification for the token
+        :type proof: str or list
+        :returns: uuid of root task
+        :rtype: celery.Task
+        """
+        if not isinstance(token_symbol, str):
+            raise ValueError('token symbol must be string')
+
+        return self.tokens([token_symbol], proof=proof)
+
+
+    def tokens(self, token_symbols, proof=None):
+        """Perform a token data lookup from the token index. The token index will enforce unique associations between token symbol and contract address.
+
+        Token symbols are always strings, and should be specified using uppercase letters.
+
+        If the proof argument is included, the network will be queried for trusted signatures on the given proof(s). There must exist at least one trusted signature for every given proof for every token. Trusted signatures for the custodial system are provided at service startup.
+
+        The proof argument may be specified in a number of ways:
+
+        - as None, in which case proof checks are skipped (although there may still be builtin proof checks being performed)
+        - as a single string, where the same proof is used for each token lookup
+        - as an array of strings, where the respective proof is used for the respective token. number of proofs must match the number of tokens.
+        - as an array of lists, where the respective proofs in each list is used for the respective token. number of lists of proofs must match the number of tokens.
+
+        The success callback provided at the Api object instantiation will receive individual calls for each token that passes the proof checks. Each token that does not pass is passed to the Api error callback.
+
+        This method is not intended to be used synchronously. Do so at your peril.
+
+        :param token_symbols: Token symbol strings to look up
+        :type token_symbol: list
+        :param proof: Proof(s) to verify tokens against
+        :type proof: None, str or list
+        :returns: uuid of root task
+        :rtype: celery.Task
+        """
+        if not isinstance(token_symbols, list):
+            raise ValueError('token symbols argument must be list')
+
+        if proof == None:
+            logg.debug('looking up tokens without external proof check: {}'.format(','.join(token_symbols)))
+            proof = ''
+
+        logg.debug('proof is {}'.format(proof))
+        l = len(token_symbols)
+        if len(proof) == 0:
+            l = 0
+        proof = Api.to_v_list(proof, l)
+
+        chain_spec_dict = self.chain_spec.asdict()
+
+        s_token_resolve = celery.signature(
+                'cic_eth.eth.erc20.resolve_tokens_by_symbol',
+                [
+                    token_symbols,
+                    chain_spec_dict,
+                    ],
+                queue=self.queue,
+                )
+
+        s_token_info = celery.signature(
+                'cic_eth.eth.erc20.token_info',
+                [
+                    chain_spec_dict,
+                    proof,
+                    ],
+                queue=self.queue,
+                )
+
+        s_token_verify = celery.signature(
+                'cic_eth.eth.erc20.verify_token_info',
+                [
+                    chain_spec_dict,
+                    self.callback_success,
+                    self.callback_error,
+                    ],
+                queue=self.queue,
+                )
+
+        s_token_info.link(s_token_verify)
+        s_token_resolve.link(s_token_info)
+        return s_token_resolve.apply_async()
+
+
 # def convert_transfer(self, from_address, to_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
 #     """Executes a chain of celery tasks that performs conversion between two ERC20 tokens, and transfers to a specified receipient after convert has completed.
 #
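The docstring above describes the accepted proof argument forms. A usage sketch follows (not part of the commit); 'api' is assumed to be an already constructed Api instance with success and error callbacks registered, and the symbols and proof digests are made-up example values.

# Illustrative usage sketch only, based on the tokens() docstring above.
api.token('GFT')                                   # no external proof check
api.tokens(['GFT', 'DEM'], proof='deadbeef' * 8)   # same proof for every token
api.tokens(['GFT', 'DEM'], proof=[['deadbeef' * 8], ['cafebabe' * 8]])  # per-token proof lists
# Results arrive asynchronously: tokens passing the proof checks are delivered to
# the success callback, failing ones to the error callback.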
@@ -1,7 +1,10 @@
+import logging
+
 import celery
 
 celery_app = celery.current_app
-logg = celery_app.log.get_default_logger()
+#logg = celery_app.log.get_default_logger()
+logg = logging.getLogger()
 
 
 @celery_app.task(bind=True)
@@ -48,8 +48,6 @@ class RoleMissingError(Exception):
     pass
 
 
-
-
 class IntegrityError(Exception):
     """Exception raised to signal irregularities with deduplication and ordering of tasks
 
@@ -85,3 +83,8 @@ class RoleAgencyError(SeppukuError):
 class YouAreBrokeError(Exception):
     """Exception raised when a value transfer is attempted without access to sufficient funds
     """
+
+
+class TrustError(Exception):
+    """Exception raised when required trust proofs are missing for a request
+    """
@@ -19,6 +19,7 @@ from hexathon import (
 from chainqueue.error import NotLocalTxError
 from eth_erc20 import ERC20
 from chainqueue.sql.tx import cache_tx_dict
+from okota.token_index import to_identifier
 
 # local imports
 from cic_eth.db.models.base import SessionBase
@@ -39,9 +40,11 @@ from cic_eth.task import (
         CriticalSQLAlchemyTask,
         CriticalWeb3Task,
         CriticalSQLAlchemyAndSignerTask,
+        BaseTask,
         )
 from cic_eth.eth.nonce import CustodialTaskNonceOracle
 from cic_eth.encode import tx_normalize
+from cic_eth.eth.trust import verify_proofs
 
 celery_app = celery.current_app
 logg = logging.getLogger()
@@ -473,3 +476,69 @@ def cache_approve_data(
     session.close()
     return (tx_hash_hex, cache_id)
 
+
+@celery_app.task(bind=True, base=BaseTask)
+def token_info(self, tokens, chain_spec_dict, proofs=[]):
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    rpc = RPCConnection.connect(chain_spec, 'default')
+
+    i = 0
+
+    for token in tokens:
+        result_data = []
+        token_chain_object = ERC20Token(chain_spec, rpc, add_0x(token['address']))
+        token_chain_object.load(rpc)
+
+        token_symbol_proof_hex = to_identifier(token_chain_object.symbol)
+        token_proofs = [token_symbol_proof_hex]
+        if len(proofs) > 0:
+            token_proofs += proofs[i]
+
+        tokens[i] = {
+            'decimals': token_chain_object.decimals,
+            'name': token_chain_object.name,
+            'symbol': token_chain_object.symbol,
+            'address': tx_normalize.executable_address(token_chain_object.address),
+            'proofs': token_proofs,
+            'converters': tokens[i]['converters'],
+            }
+        i += 1
+
+    return tokens
+
+
+@celery_app.task(bind=True, base=BaseTask)
+def verify_token_info(self, tokens, chain_spec_dict, success_callback, error_callback):
+    queue = self.request.delivery_info.get('routing_key')
+
+    for token in tokens:
+        s = celery.signature(
+                'cic_eth.eth.trust.verify_proofs',
+                [
+                    token,
+                    token['address'],
+                    token['proofs'],
+                    chain_spec_dict,
+                    success_callback,
+                    error_callback,
+                    ],
+                queue=queue,
+                )
+
+        if success_callback != None:
+            s.link(success_callback)
+        if error_callback != None:
+            s.on_error(error_callback)
+        s.apply_async()
+
+    return tokens
+
+
+@celery_app.task(bind=True, base=BaseTask)
+def default_token(self):
+    return {
+        'symbol': self.default_token_symbol,
+        'address': self.default_token_address,
+        'name': self.default_token_name,
+        'decimals': self.default_token_decimals,
+        }
apps/cic-eth/cic_eth/eth/trust.py (new file, 77 lines)
@@ -0,0 +1,77 @@
# standard imports
import logging

# external imports
import celery
from eth_address_declarator import Declarator
from chainlib.connection import RPCConnection
from chainlib.chain import ChainSpec
from cic_eth.db.models.role import AccountRole
from cic_eth_registry import CICRegistry
from hexathon import strip_0x

# local imports
from cic_eth.task import BaseTask
from cic_eth.error import TrustError

celery_app = celery.current_app
logg = logging.getLogger()


@celery_app.task(bind=True, base=BaseTask)
def verify_proof(self, chained_input, proof, subject, chain_spec_dict, success_callback, error_callback):
    proof = strip_0x(proof)

    proofs = []

    logg.debug('proof count {}'.format(len(proofs)))
    if len(proofs) == 0:
        logg.debug('error {}'.format(len(proofs)))
        raise TrustError('foo')

    return (chained_input, (proof, proofs))


@celery_app.task(bind=True, base=BaseTask)
def verify_proofs(self, chained_input, subject, proofs, chain_spec_dict, success_callback, error_callback):
    queue = self.request.delivery_info.get('routing_key')

    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    rpc = RPCConnection.connect(chain_spec, 'default')

    session = self.create_session()
    sender_address = AccountRole.get_address('DEFAULT', session)

    registry = CICRegistry(chain_spec, rpc)
    declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address)

    declarator = Declarator(chain_spec)

    have_proofs = {}

    for proof in proofs:

        proof = strip_0x(proof)

        have_proofs[proof] = []

        for trusted_address in self.trusted_addresses:
            o = declarator.declaration(declarator_address, trusted_address, subject, sender_address=sender_address)
            r = rpc.do(o)
            declarations = declarator.parse_declaration(r)
            logg.debug('comparing proof {} with declarations for {} by {}: {}'.format(proof, subject, trusted_address, declarations))

            for declaration in declarations:
                declaration = strip_0x(declaration)
                if declaration == proof:
                    logg.debug('have token proof {} match for trusted address {}'.format(declaration, trusted_address))
                    have_proofs[proof].append(trusted_address)

    out_proofs = {}
    for proof in have_proofs.keys():
        if len(have_proofs[proof]) == 0:
            logg.error('missing signer for proof {} subject {}'.format(proof, subject))
            raise TrustError((subject, proof,))
        out_proofs[proof] = have_proofs[proof]

    return (chained_input, out_proofs)
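A small sketch (not part of the commit) of the bookkeeping verify_proofs performs: every proof must be matched by a declaration from at least one trusted address, otherwise TrustError is raised. The addresses and digests below are made-up examples.

# Illustrative data-shape sketch only, mirroring the matching loop above.
trusted_addresses = ['0xaaa1', '0xbbb2']
declared = {'0xaaa1': ['f00f'], '0xbbb2': []}   # declarations fetched per trusted address

have_proofs = {'f00f': []}
for proof in have_proofs:
    for trusted in trusted_addresses:
        if proof in declared[trusted]:
            have_proofs[proof].append(trusted)
# have_proofs == {'f00f': ['0xaaa1']} -> proof accepted; an empty list would raise TrustError.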
@@ -4,18 +4,21 @@ import tempfile
 import logging
 import shutil
 
-# local impors
+# local imports
 from cic_eth.task import BaseTask
 
 #logg = logging.getLogger(__name__)
 logg = logging.getLogger()
 
 
 @pytest.fixture(scope='function')
 def init_celery_tasks(
         contract_roles,
         ):
     BaseTask.call_address = contract_roles['DEFAULT']
+    BaseTask.trusted_addresses = [
+            contract_roles['TRUSTED_DECLARATOR'],
+            contract_roles['CONTRACT_DEPLOYER'],
+            ]
 
 
 # celery fixtures
@@ -38,6 +41,7 @@ def celery_includes():
         'cic_eth.callbacks.noop',
         'cic_eth.callbacks.http',
         'cic_eth.pytest.mock.filter',
+        'cic_eth.pytest.mock.callback',
         ]
 
 
@@ -1 +1,2 @@
 from .filter import *
+from .callback import *
apps/cic-eth/cic_eth/pytest/mock/callback.py (new file, 38 lines)
@@ -0,0 +1,38 @@
# standard imports
import os
import logging
import mmap

# standard imports
import tempfile

# external imports
import celery

#logg = logging.getLogger(__name__)
logg = logging.getLogger()

celery_app = celery.current_app


class CallbackTask(celery.Task):

    mmap_path = tempfile.mkdtemp()


@celery_app.task(bind=True, base=CallbackTask)
def test_callback(self, a, b, c):
    s = 'ok'
    if c > 0:
        s = 'err'

    fp = os.path.join(self.mmap_path, b)
    f = open(fp, 'wb+')
    f.write(b'\x00')
    f.seek(0)
    m = mmap.mmap(f.fileno(), length=1)
    m.write(c.to_bytes(1, 'big'))
    m.close()
    f.close()

    logg.debug('test callback ({}): {} {} {}'.format(s, a, b, c))
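A sketch (not part of the commit) of how a test might read back the single status byte that the mock callback above writes; the file is assumed to be named after the second callback argument, for example a task uuid chosen by the test.

# Illustrative sketch only: reading the status byte written by test_callback.
import os

def read_callback_status(name):
    with open(os.path.join(CallbackTask.mmap_path, name), 'rb') as f:
        return int.from_bytes(f.read(1), 'big')   # 0 means 'ok', nonzero means 'err'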
@@ -10,7 +10,6 @@ import datetime
 
 # external imports
 import celery
-from cic_eth_registry import CICRegistry
 from chainlib.chain import ChainSpec
 from chainlib.eth.tx import unpack
 from chainlib.connection import RPCConnection
@@ -76,7 +76,7 @@ arg_flags = cic_eth.cli.argflag_std_read
 local_arg_flags = cic_eth.cli.argflag_local_task
 argparser = cic_eth.cli.ArgumentParser(arg_flags)
 argparser.process_local_flags(local_arg_flags)
-argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use')
+#argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use')
 argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to perist all queue entry status changes to storage')
 argparser.add_argument('--aux-all', action='store_true', help='include tasks from all submodules from the aux module path')
 argparser.add_argument('--aux', action='append', type=str, default=[], help='add single submodule from the aux module path')
@@ -84,7 +84,7 @@ args = argparser.parse_args()
 
 # process config
 extra_args = {
-    'default_token_symbol': 'CIC_DEFAULT_TOKEN_SYMBOL',
+#    'default_token_symbol': 'CIC_DEFAULT_TOKEN_SYMBOL',
     'aux_all': None,
     'aux': None,
     'trace_queue_status': 'TASKS_TRACE_QUEUE_STATUS',
'trace_queue_status': 'TASKS_TRACE_QUEUE_STATUS',
|
||||||
@ -187,6 +187,17 @@ elif len(args.aux) > 0:
|
|||||||
logg.info('aux module {} found in path {}'.format(v, aux_dir))
|
logg.info('aux module {} found in path {}'.format(v, aux_dir))
|
||||||
aux.append(v)
|
aux.append(v)
|
||||||
|
|
||||||
|
default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
|
||||||
|
defaullt_token_address = None
|
||||||
|
if default_token_symbol:
|
||||||
|
default_token_address = registry.by_name(default_token_symbol)
|
||||||
|
else:
|
||||||
|
default_token_address = registry.by_name('DefaultToken')
|
||||||
|
c = ERC20Token(chain_spec, conn, default_token_address)
|
||||||
|
default_token_symbol = c.symbol
|
||||||
|
logg.info('found default token {} address {}'.format(default_token_symbol, default_token_address))
|
||||||
|
config.add(default_token_symbol, 'CIC_DEFAULT_TOKEN_SYMBOL', exists_ok=True)
|
||||||
|
|
||||||
for v in aux:
|
for v in aux:
|
||||||
mname = 'cic_eth_aux.' + v
|
mname = 'cic_eth_aux.' + v
|
||||||
mod = importlib.import_module(mname)
|
mod = importlib.import_module(mname)
@@ -204,12 +215,13 @@ def main():
     argv.append('-n')
     argv.append(config.get('CELERY_QUEUE'))
 
-    BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
-    BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol)
+    BaseTask.default_token_symbol = default_token_symbol
+    BaseTask.default_token_address = default_token_address
     default_token = ERC20Token(chain_spec, conn, add_0x(BaseTask.default_token_address))
     default_token.load(conn)
     BaseTask.default_token_decimals = default_token.decimals
     BaseTask.default_token_name = default_token.name
+    BaseTask.trusted_addresses = trusted_addresses
+
     BaseTask.run_dir = config.get('CIC_RUN_DIR')
     logg.info('default token set to {} {}'.format(BaseTask.default_token_symbol, BaseTask.default_token_address))
@@ -28,6 +28,7 @@ class BaseTask(celery.Task):
 
     session_func = SessionBase.create_session
     call_address = ZERO_ADDRESS
+    trusted_addresses = []
    create_nonce_oracle = RPCNonceOracle
    create_gas_oracle = RPCGasOracle
    default_token_address = None
@@ -10,7 +10,7 @@ version = (
         0,
         12,
         4,
-        'alpha.8',
+        'alpha.13',
         )
 
 version_object = semver.VersionInfo(
@@ -1,46 +1,36 @@
-# syntax = docker/dockerfile:1.2
-FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
+ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
+
+FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e
 
 # Copy just the requirements and install....this _might_ give docker a hint on caching but we
 # do load these all into setup.py later
 # TODO can we take all the requirements out of setup.py and just do a pip install -r requirements.txt && python setup.py
 #COPY cic-eth/requirements.txt .
 
-ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
-ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
+ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
 ARG EXTRA_PIP_ARGS=""
-#RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-#    pip install --index-url https://pypi.org/simple \
-#    --force-reinstall \
-#    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
-#    -r requirements.txt
+ARG PIP_INDEX_URL=https://pypi.org/simple
 
-RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-    pip install --index-url https://pypi.org/simple \
-    --extra-index-url $GITLAB_PYTHON_REGISTRY \
-    --extra-index-url $EXTRA_INDEX_URL \
-    $EXTRA_PIP_ARGS \
-    cic-eth-aux-erc20-demurrage-token~=0.0.2a6
+RUN apt-get install libffi-dev
+
+RUN pip install --index-url $PIP_INDEX_URL \
+    --pre \
+    --force-reinstall \
+    --no-cache \
+    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
+    cic-eth-aux-erc20-demurrage-token~=0.0.2a7
 
 COPY *requirements.txt ./
-RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-    pip install --index-url https://pypi.org/simple \
-    --extra-index-url $GITLAB_PYTHON_REGISTRY \
-    --extra-index-url $EXTRA_INDEX_URL \
-    $EXTRA_PIP_ARGS \
+RUN pip install --index-url $PIP_INDEX_URL \
+    --pre \
+    --force-reinstall \
+    --no-cache \
+    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
     -r requirements.txt \
    -r services_requirements.txt \
    -r admin_requirements.txt
 
-# always install the latest signer
-RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
-    pip install --index-url https://pypi.org/simple \
-    --extra-index-url $GITLAB_PYTHON_REGISTRY \
-    --extra-index-url $EXTRA_INDEX_URL \
-    $EXTRA_PIP_ARGS \
-    crypto-dev-signer
 
 COPY . .
 RUN python setup.py install
 
|
||||||
@ -53,7 +43,7 @@ RUN chmod 755 *.sh
|
|||||||
# # ini files in config directory defines the configurable parameters for the application
|
# # ini files in config directory defines the configurable parameters for the application
|
||||||
# # they can all be overridden by environment variables
|
# # they can all be overridden by environment variables
|
||||||
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||||
COPY config/ /usr/local/etc/cic-eth/
|
#COPY config/ /usr/local/etc/cic-eth/
|
||||||
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
|
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
|
||||||
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
|
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
|
||||||
|
|
||||||
|
@ -2,5 +2,6 @@
|
|||||||
|
|
||||||
set -e
|
set -e
|
||||||
>&2 echo executing database migration
|
>&2 echo executing database migration
|
||||||
python scripts/migrate.py -c /usr/local/etc/cic-eth --migrations-dir /usr/local/share/cic-eth/alembic -vv
|
#python scripts/migrate.py -c /usr/local/etc/cic-eth --migrations-dir /usr/local/share/cic-eth/alembic -vv
|
||||||
|
python scripts/migrate.py --migrations-dir /usr/local/share/cic-eth/alembic -vv
|
||||||
set +e
|
set +e
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
celery==4.4.7
|
celery==4.4.7
|
||||||
chainlib-eth>=0.0.9rc2,<0.1.0
|
chainlib-eth>=0.0.10a4,<0.1.0
|
||||||
semver==2.13.0
|
semver==2.13.0
|
||||||
crypto-dev-signer>=0.4.15rc2,<0.5.0
|
crypto-dev-signer>=0.4.15rc2,<0.5.0
|
||||||
|
@ -1,15 +1,16 @@
|
|||||||
chainqueue>=0.0.5a1,<0.1.0
|
chainqueue>=0.0.6a1,<0.1.0
|
||||||
chainsyncer[sql]>=0.0.6a3,<0.1.0
|
chainsyncer[sql]>=0.0.7a3,<0.1.0
|
||||||
alembic==1.4.2
|
alembic==1.4.2
|
||||||
confini>=0.3.6rc4,<0.5.0
|
confini>=0.3.6rc4,<0.5.0
|
||||||
redis==3.5.3
|
redis==3.5.3
|
||||||
hexathon~=0.0.1a8
|
hexathon~=0.0.1a8
|
||||||
pycryptodome==3.10.1
|
pycryptodome==3.10.1
|
||||||
liveness~=0.0.1a7
|
liveness~=0.0.1a7
|
||||||
eth-address-index>=0.2.3a4,<0.3.0
|
eth-address-index>=0.2.4a1,<0.3.0
|
||||||
eth-accounts-index>=0.1.2a3,<0.2.0
|
eth-accounts-index>=0.1.2a3,<0.2.0
|
||||||
cic-eth-registry>=0.6.1a3,<0.7.0
|
cic-eth-registry>=0.6.1a6,<0.7.0
|
||||||
erc20-faucet>=0.3.2a2,<0.4.0
|
erc20-faucet>=0.3.2a2,<0.4.0
|
||||||
erc20-transfer-authorization>=0.3.5a2,<0.4.0
|
erc20-transfer-authorization>=0.3.5a2,<0.4.0
|
||||||
sarafu-faucet>=0.0.7a2,<0.1.0
|
sarafu-faucet>=0.0.7a2,<0.1.0
|
||||||
moolb~=0.1.1b2
|
moolb~=0.1.1b2
|
||||||
|
okota>=0.2.4a6,<0.3.0
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
[metadata]
|
[metadata]
|
||||||
name = cic-eth
|
name = cic-eth
|
||||||
version = attr: cic_eth.version.__version_string__
|
#version = attr: cic_eth.version.__version_string__
|
||||||
|
version = 0.12.4a13
|
||||||
description = CIC Network Ethereum interaction
|
description = CIC Network Ethereum interaction
|
||||||
author = Louis Holbrook
|
author = Louis Holbrook
|
||||||
author_email = dev@holbrook.no
|
author_email = dev@holbrook.no
|
||||||
|
@ -1,6 +1,27 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
import time
|
||||||
|
import mmap
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
import pytest
|
||||||
|
from hexathon import (
|
||||||
|
strip_0x,
|
||||||
|
uniform as hex_uniform,
|
||||||
|
)
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.api.api_task import Api
|
from cic_eth.api.api_task import Api
|
||||||
from cic_eth.task import BaseTask
|
from cic_eth.task import BaseTask
|
||||||
|
from cic_eth.error import TrustError
|
||||||
|
from cic_eth.encode import tx_normalize
|
||||||
|
from cic_eth.pytest.mock.callback import CallbackTask
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
def test_default_token(
|
def test_default_token(
|
||||||
default_chain_spec,
|
default_chain_spec,
|
||||||
@ -17,3 +38,175 @@ def test_default_token(
|
|||||||
t = api.default_token()
|
t = api.default_token()
|
||||||
r = t.get_leaf()
|
r = t.get_leaf()
|
||||||
assert r['address'] == foo_token
|
assert r['address'] == foo_token
|
||||||
|
|
||||||
|
|
||||||
|
def test_to_v_list():
|
||||||
|
assert Api.to_v_list('', 0) == []
|
||||||
|
assert Api.to_v_list([], 0) == []
|
||||||
|
assert Api.to_v_list('foo', 1) == [['foo']]
|
||||||
|
assert Api.to_v_list(['foo'], 1) == [['foo']]
|
||||||
|
assert Api.to_v_list(['foo', 'bar'], 2) == [['foo'], ['bar']]
|
||||||
|
assert Api.to_v_list('foo', 3) == [['foo'], ['foo'], ['foo']]
|
||||||
|
assert Api.to_v_list([['foo'], ['bar']], 2) == [['foo'], ['bar']]
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Api.to_v_list([['foo'], ['bar']], 3)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Api.to_v_list(['foo', 'bar'], 3)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
Api.to_v_list([['foo'], ['bar'], ['baz']], 2)
|
||||||
|
|
||||||
|
assert Api.to_v_list([
|
||||||
|
['foo'],
|
||||||
|
'bar',
|
||||||
|
['inky', 'pinky', 'blinky', 'clyde'],
|
||||||
|
], 3) == [
|
||||||
|
['foo'],
|
||||||
|
['bar'],
|
||||||
|
['inky', 'pinky', 'blinky', 'clyde'],
|
||||||
|
]
|
||||||
|
|
||||||
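The assertions above pin down the normalization contract of Api.to_v_list: a proof argument is turned into a list of n proof lists. A minimal sketch that satisfies those assertions follows; it is not the shipped implementation, which may handle further cases.

```python
def to_v_list(proof, n):
    # normalize a proof argument into a list of n proof lists,
    # as the assertions above require (sketch only)
    if n == 0:
        return []
    if isinstance(proof, str):
        # a single string is repeated once per requested position
        return [[proof] for _ in range(n)]
    if len(proof) != n:
        raise ValueError('proof count {} does not match {}'.format(len(proof), n))
    # wrap any bare string elements so every entry is itself a list
    return [item if isinstance(item, list) else [item] for item in proof]
```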
|
|
||||||
|
def test_token_single(
|
||||||
|
default_chain_spec,
|
||||||
|
foo_token,
|
||||||
|
bar_token,
|
||||||
|
token_registry,
|
||||||
|
register_tokens,
|
||||||
|
register_lookups,
|
||||||
|
cic_registry,
|
||||||
|
init_database,
|
||||||
|
init_celery_tasks,
|
||||||
|
custodial_roles,
|
||||||
|
foo_token_declaration,
|
||||||
|
bar_token_declaration,
|
||||||
|
celery_session_worker,
|
||||||
|
):
|
||||||
|
|
||||||
|
api = Api(str(default_chain_spec), queue=None, callback_param='foo')
|
||||||
|
|
||||||
|
t = api.token('FOO', proof=None)
|
||||||
|
r = t.get()
|
||||||
|
logg.debug('rr {}'.format(r))
|
||||||
|
assert len(r) == 1
|
||||||
|
assert r[0]['address'] == strip_0x(foo_token)
|
||||||
|
|
||||||
|
|
||||||
|
t = api.token('FOO', proof=foo_token_declaration)
|
||||||
|
r = t.get()
|
||||||
|
assert len(r) == 1
|
||||||
|
assert r[0]['address'] == strip_0x(foo_token)
|
||||||
|
|
||||||
|
|
||||||
|
def test_tokens_noproof(
|
||||||
|
default_chain_spec,
|
||||||
|
foo_token,
|
||||||
|
bar_token,
|
||||||
|
token_registry,
|
||||||
|
register_tokens,
|
||||||
|
register_lookups,
|
||||||
|
cic_registry,
|
||||||
|
init_database,
|
||||||
|
init_celery_tasks,
|
||||||
|
custodial_roles,
|
||||||
|
foo_token_declaration,
|
||||||
|
bar_token_declaration,
|
||||||
|
celery_session_worker,
|
||||||
|
):
|
||||||
|
|
||||||
|
api = Api(str(default_chain_spec), queue=None, callback_param='foo')
|
||||||
|
|
||||||
|
t = api.tokens(['FOO'], proof=[])
|
||||||
|
r = t.get()
|
||||||
|
assert len(r) == 1
|
||||||
|
assert r[0]['address'] == strip_0x(foo_token)
|
||||||
|
|
||||||
|
t = api.tokens(['BAR'], proof='')
|
||||||
|
r = t.get()
|
||||||
|
assert len(r) == 1
|
||||||
|
assert r[0]['address'] == strip_0x(bar_token)
|
||||||
|
|
||||||
|
t = api.tokens(['FOO'], proof=None)
|
||||||
|
r = t.get()
|
||||||
|
assert len(r) == 1
|
||||||
|
assert r[0]['address'] == strip_0x(foo_token)
|
||||||
|
|
||||||
|
|
||||||
|
def test_tokens(
|
||||||
|
default_chain_spec,
|
||||||
|
foo_token,
|
||||||
|
bar_token,
|
||||||
|
token_registry,
|
||||||
|
register_tokens,
|
||||||
|
register_lookups,
|
||||||
|
cic_registry,
|
||||||
|
init_database,
|
||||||
|
init_celery_tasks,
|
||||||
|
custodial_roles,
|
||||||
|
foo_token_declaration,
|
||||||
|
bar_token_declaration,
|
||||||
|
celery_session_worker,
|
||||||
|
):
|
||||||
|
|
||||||
|
api = Api(str(default_chain_spec), queue=None, callback_param='foo')
|
||||||
|
|
||||||
|
t = api.tokens(['FOO'], proof=[[foo_token_declaration]])
|
||||||
|
r = t.get()
|
||||||
|
logg.debug('rr {}'.format(r))
|
||||||
|
assert len(r) == 1
|
||||||
|
assert r[0]['address'] == strip_0x(foo_token)
|
||||||
|
|
||||||
|
t = api.tokens(['BAR', 'FOO'], proof=[[bar_token_declaration], [foo_token_declaration]])
|
||||||
|
r = t.get()
|
||||||
|
logg.debug('results {}'.format(r))
|
||||||
|
assert len(r) == 2
|
||||||
|
assert r[1]['address'] == strip_0x(foo_token)
|
||||||
|
assert r[0]['address'] == strip_0x(bar_token)
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
|
||||||
|
results = []
|
||||||
|
targets = []
|
||||||
|
|
||||||
|
api_param = str(uuid.uuid4())
|
||||||
|
api = Api(str(default_chain_spec), queue=None, callback_param=api_param, callback_task='cic_eth.pytest.mock.callback.test_callback')
|
||||||
|
bogus_proof = os.urandom(32).hex()
|
||||||
|
t = api.tokens(['FOO'], proof=[[bogus_proof]])
|
||||||
|
r = t.get()
|
||||||
|
logg.debug('r {}'.format(r))
|
||||||
|
|
||||||
|
while True:
|
||||||
|
fp = os.path.join(CallbackTask.mmap_path, api_param)
|
||||||
|
try:
|
||||||
|
f = open(fp, 'rb')
|
||||||
|
except FileNotFoundError:
|
||||||
|
time.sleep(0.1)
|
||||||
|
logg.debug('look for {}'.format(fp))
|
||||||
|
continue
|
||||||
|
f = open(fp, 'rb')
|
||||||
|
m = mmap.mmap(f.fileno(), access=mmap.ACCESS_READ, length=1)
|
||||||
|
v = m.read(1)
|
||||||
|
m.close()
|
||||||
|
f.close()
|
||||||
|
assert v == b'\x01'
|
||||||
|
break
|
||||||
|
|
||||||
|
api_param = str(uuid.uuid4())
|
||||||
|
api = Api(str(default_chain_spec), queue=None, callback_param=api_param, callback_task='cic_eth.pytest.mock.callback.test_callback')
|
||||||
|
t = api.tokens(['BAR'], proof=[[bar_token_declaration]])
|
||||||
|
r = t.get()
|
||||||
|
logg.debug('rr {} {}'.format(r, t.children))
|
||||||
|
|
||||||
|
while True:
|
||||||
|
fp = os.path.join(CallbackTask.mmap_path, api_param)
|
||||||
|
try:
|
||||||
|
f = open(fp, 'rb')
|
||||||
|
except FileNotFoundError:
|
||||||
|
time.sleep(0.1)
|
||||||
|
continue
|
||||||
|
m = mmap.mmap(f.fileno(), access=mmap.ACCESS_READ, length=1)
|
||||||
|
v = m.read(1)
|
||||||
|
m.close()
|
||||||
|
f.close()
|
||||||
|
assert v == b'\x00'
|
||||||
|
break
|
||||||
|
|
||||||
|
@ -10,7 +10,7 @@ def test_default_token(
|
|||||||
):
|
):
|
||||||
|
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.admin.token.default_token',
|
'cic_eth.eth.erc20.default_token',
|
||||||
[],
|
[],
|
||||||
queue=None,
|
queue=None,
|
||||||
)
|
)
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
crypto-dev-signer>=0.4.15a7,<=0.4.15
|
crypto-dev-signer>=0.4.15rc2,<=0.4.15
|
||||||
chainqueue>=0.0.5a1,<0.1.0
|
chainqueue>=0.0.5a3,<0.1.0
|
||||||
cic-eth-registry>=0.6.1a3,<0.7.0
|
cic-eth-registry>=0.6.1a6,<0.7.0
|
||||||
redis==3.5.3
|
redis==3.5.3
|
||||||
hexathon~=0.0.1a8
|
hexathon~=0.0.1a8
|
||||||
pycryptodome==3.10.1
|
pycryptodome==3.10.1
|
||||||
|
@ -1,16 +1,16 @@
|
|||||||
# syntax = docker/dockerfile:1.2
|
FROM node:15.3.0-alpine3.10
|
||||||
#FROM node:15.3.0-alpine3.10
|
#FROM node:lts-alpine3.14
|
||||||
FROM node:lts-alpine3.14
|
|
||||||
|
|
||||||
WORKDIR /root
|
WORKDIR /root
|
||||||
|
|
||||||
RUN apk add --no-cache postgresql bash
|
RUN apk add --no-cache postgresql bash
|
||||||
|
|
||||||
# copy the dependencies
|
# copy the dependencies
|
||||||
COPY package.json package-lock.json .
|
COPY package.json package-lock.json ./
|
||||||
RUN --mount=type=cache,mode=0755,target=/root/.npm \
|
#RUN --mount=type=cache,mode=0755,target=/root/.npm \
|
||||||
npm set cache /root/.npm && \
|
RUN npm set cache /root/.npm && \
|
||||||
npm ci
|
npm cache verify && \
|
||||||
|
npm ci --verbose
|
||||||
|
|
||||||
COPY webpack.config.js .
|
COPY webpack.config.js .
|
||||||
COPY tsconfig.json .
|
COPY tsconfig.json .
|
||||||
|
@ -2,21 +2,24 @@
|
|||||||
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||||
|
|
||||||
#RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
|
#RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
|
||||||
|
RUN apt-get install libffi-dev -y
|
||||||
|
|
||||||
|
|
||||||
|
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
|
||||||
|
ARG EXTRA_PIP_ARGS=""
|
||||||
|
ARG PIP_INDEX_URL=https://pypi.org/simple
|
||||||
|
|
||||||
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
|
||||||
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
|
|
||||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
RUN pip install --index-url $PIP_INDEX_URL \
|
||||||
pip install --index-url https://pypi.org/simple \
|
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||||
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
|
||||||
-r requirements.txt
|
-r requirements.txt
|
||||||
|
|
||||||
COPY . .
|
COPY . .
|
||||||
RUN python setup.py install
|
RUN python setup.py install
|
||||||
|
|
||||||
COPY docker/*.sh .
|
COPY docker/*.sh ./
|
||||||
RUN chmod +x *.sh
|
RUN chmod +x /root/*.sh
|
||||||
|
|
||||||
# ini files in config directory defines the configurable parameters for the application
|
# ini files in config directory defines the configurable parameters for the application
|
||||||
# they can all be overridden by environment variables
|
# they can all be overridden by environment variables
|
||||||
|
23
apps/cic-signer/Dockerfile
Normal file
23
apps/cic-signer/Dockerfile
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
ARG DOCKER_REGISTRY=registry.gitlab.com/grassrootseconomics
|
||||||
|
|
||||||
|
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||||
|
|
||||||
|
WORKDIR /root
|
||||||
|
|
||||||
|
RUN apt-get install libffi-dev -y
|
||||||
|
|
||||||
|
COPY requirements.txt .
|
||||||
|
|
||||||
|
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||||
|
ARG EXTRA_PIP_ARGS=""
|
||||||
|
ARG PIP_INDEX_URL="https://pypi.org/simple"
|
||||||
|
RUN pip install --index-url $PIP_INDEX_URL \
|
||||||
|
--pre \
|
||||||
|
--force-reinstall \
|
||||||
|
--no-cache \
|
||||||
|
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||||
|
-r requirements.txt
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
#RUN chmod +x *.sh
|
1
apps/cic-signer/requirements.txt
Normal file
1
apps/cic-signer/requirements.txt
Normal file
@ -0,0 +1 @@
|
|||||||
|
funga-eth[sql]>=0.5.1a1,<0.6.0
|
@ -7,6 +7,7 @@ from typing import Optional
|
|||||||
# third-party imports
|
# third-party imports
|
||||||
from cic_eth.api import Api
|
from cic_eth.api import Api
|
||||||
from cic_eth_aux.erc20_demurrage_token.api import Api as DemurrageApi
|
from cic_eth_aux.erc20_demurrage_token.api import Api as DemurrageApi
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.transaction import from_wei
|
from cic_ussd.account.transaction import from_wei
|
||||||
@ -102,7 +103,7 @@ def get_cached_available_balance(blockchain_address: str) -> float:
|
|||||||
:rtype: float
|
:rtype: float
|
||||||
"""
|
"""
|
||||||
identifier = bytes.fromhex(blockchain_address)
|
identifier = bytes.fromhex(blockchain_address)
|
||||||
key = cache_data_key(identifier, salt=':cic.balances')
|
key = cache_data_key(identifier, salt=MetadataPointer.BALANCES)
|
||||||
cached_balances = get_cached_data(key=key)
|
cached_balances = get_cached_data(key=key)
|
||||||
if cached_balances:
|
if cached_balances:
|
||||||
return calculate_available_balance(json.loads(cached_balances))
|
return calculate_available_balance(json.loads(cached_balances))
|
||||||
@ -117,5 +118,5 @@ def get_cached_adjusted_balance(identifier: bytes):
|
|||||||
:return:
|
:return:
|
||||||
:rtype:
|
:rtype:
|
||||||
"""
|
"""
|
||||||
key = cache_data_key(identifier, ':cic.adjusted_balance')
|
key = cache_data_key(identifier, MetadataPointer.BALANCES_ADJUSTED)
|
||||||
return get_cached_data(key)
|
return get_cached_data(key)
|
||||||
|
@ -7,6 +7,7 @@ from typing import Optional
|
|||||||
import celery
|
import celery
|
||||||
from chainlib.hash import strip_0x
|
from chainlib.hash import strip_0x
|
||||||
from cic_eth.api import Api
|
from cic_eth.api import Api
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local import
|
# local import
|
||||||
from cic_ussd.account.chain import Chain
|
from cic_ussd.account.chain import Chain
|
||||||
@ -53,7 +54,7 @@ def get_cached_statement(blockchain_address: str) -> bytes:
|
|||||||
:rtype: str
|
:rtype: str
|
||||||
"""
|
"""
|
||||||
identifier = bytes.fromhex(strip_0x(blockchain_address))
|
identifier = bytes.fromhex(strip_0x(blockchain_address))
|
||||||
key = cache_data_key(identifier=identifier, salt=':cic.statement')
|
key = cache_data_key(identifier=identifier, salt=MetadataPointer.STATEMENT)
|
||||||
return get_cached_data(key=key)
|
return get_cached_data(key=key)
|
||||||
|
|
||||||
|
|
||||||
|
@ -5,6 +5,7 @@ from typing import Dict, Optional
|
|||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
from cic_eth.api import Api
|
from cic_eth.api import Api
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.chain import Chain
|
from cic_ussd.account.chain import Chain
|
||||||
@ -23,7 +24,7 @@ def get_cached_default_token(chain_str: str) -> Optional[str]:
|
|||||||
:rtype:
|
:rtype:
|
||||||
"""
|
"""
|
||||||
logg.debug(f'Retrieving default token from cache for chain: {chain_str}')
|
logg.debug(f'Retrieving default token from cache for chain: {chain_str}')
|
||||||
key = cache_data_key(identifier=chain_str.encode('utf-8'), salt=':cic.default_token_data')
|
key = cache_data_key(identifier=chain_str.encode('utf-8'), salt=MetadataPointer.TOKEN_DEFAULT)
|
||||||
return get_cached_data(key=key)
|
return get_cached_data(key=key)
|
||||||
|
|
||||||
|
|
||||||
|
@ -2,7 +2,8 @@
|
|||||||
import hashlib
|
import hashlib
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
from redis import Redis
|
from redis import Redis
|
||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
@ -38,7 +39,7 @@ def get_cached_data(key: str):
|
|||||||
return cache.get(name=key)
|
return cache.get(name=key)
|
||||||
|
|
||||||
|
|
||||||
def cache_data_key(identifier: bytes, salt: str):
|
def cache_data_key(identifier: bytes, salt: MetadataPointer):
|
||||||
"""
|
"""
|
||||||
:param identifier:
|
:param identifier:
|
||||||
:type identifier:
|
:type identifier:
|
||||||
@ -49,5 +50,5 @@ def cache_data_key(identifier: bytes, salt: str):
|
|||||||
"""
|
"""
|
||||||
hash_object = hashlib.new("sha256")
|
hash_object = hashlib.new("sha256")
|
||||||
hash_object.update(identifier)
|
hash_object.update(identifier)
|
||||||
hash_object.update(salt.encode(encoding="utf-8"))
|
hash_object.update(salt.value.encode(encoding="utf-8"))
|
||||||
return hash_object.digest().hex()
|
return hash_object.digest().hex()
|
||||||
|
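The key derivation itself is unchanged; only the salt is now a MetadataPointer member whose .value is read instead of a raw string. A self-contained sketch, assuming the enum values are the old salt strings (the real enum ships in cic_types.condiments):

```python
import hashlib
from enum import Enum


class MetadataPointer(Enum):
    # illustrative members only; values assumed to match the old string salts
    PERSON = ':cic.person'
    BALANCES = ':cic.balances'
    STATEMENT = ':cic.statement'


def cache_data_key(identifier: bytes, salt: MetadataPointer) -> str:
    hash_object = hashlib.new('sha256')
    hash_object.update(identifier)
    hash_object.update(salt.value.encode(encoding='utf-8'))
    return hash_object.digest().hex()


key = cache_data_key(bytes.fromhex('deadbeef'), MetadataPointer.BALANCES)
```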
@ -3,6 +3,7 @@ import json
|
|||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
from cic_eth.api import Api
|
from cic_eth.api import Api
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.metadata import get_cached_preferred_language, parse_account_metadata
|
from cic_ussd.account.metadata import get_cached_preferred_language, parse_account_metadata
|
||||||
@ -109,7 +110,7 @@ class Account(SessionBase):
|
|||||||
:rtype: str
|
:rtype: str
|
||||||
"""
|
"""
|
||||||
identifier = bytes.fromhex(self.blockchain_address)
|
identifier = bytes.fromhex(self.blockchain_address)
|
||||||
key = cache_data_key(identifier, ':cic.person')
|
key = cache_data_key(identifier, MetadataPointer.PERSON)
|
||||||
account_metadata = get_cached_data(key)
|
account_metadata = get_cached_data(key)
|
||||||
if not account_metadata:
|
if not account_metadata:
|
||||||
return self.phone_number
|
return self.phone_number
|
||||||
|
@ -3,7 +3,6 @@
|
|||||||
# external imports
|
# external imports
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from .base import Metadata
|
|
||||||
from .custom import CustomMetadata
|
from .custom import CustomMetadata
|
||||||
from .person import PersonMetadata
|
from .person import PersonMetadata
|
||||||
from .phone import PhonePointerMetadata
|
from .phone import PhonePointerMetadata
|
||||||
|
@ -1,99 +1,30 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
|
||||||
from typing import Dict, Union
|
|
||||||
|
|
||||||
# third-part imports
|
# external imports
|
||||||
from cic_types.models.person import generate_metadata_pointer, Person
|
from cic_types.condiments import MetadataPointer
|
||||||
|
from cic_types.ext.metadata import MetadataRequestsHandler
|
||||||
|
from cic_types.processor import generate_metadata_pointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.cache import cache_data, get_cached_data
|
from cic_ussd.cache import cache_data, get_cached_data
|
||||||
from cic_ussd.http.requests import error_handler, make_request
|
|
||||||
from cic_ussd.metadata.signer import Signer
|
|
||||||
|
|
||||||
logg = logging.getLogger(__file__)
|
logg = logging.getLogger(__file__)
|
||||||
|
|
||||||
|
|
||||||
class Metadata:
|
class UssdMetadataHandler(MetadataRequestsHandler):
|
||||||
"""
|
def __init__(self, cic_type: MetadataPointer, identifier: bytes):
|
||||||
:cvar base_url: The base url or the metadata server.
|
super().__init__(cic_type, identifier)
|
||||||
:type base_url: str
|
|
||||||
"""
|
|
||||||
|
|
||||||
base_url = None
|
def cache_metadata(self, data: str):
|
||||||
|
"""
|
||||||
|
:param data:
|
||||||
class MetadataRequestsHandler(Metadata):
|
:type data:
|
||||||
|
:return:
|
||||||
def __init__(self, cic_type: str, identifier: bytes, engine: str = 'pgp'):
|
:rtype:
|
||||||
""""""
|
"""
|
||||||
self.cic_type = cic_type
|
cache_data(self.metadata_pointer, data)
|
||||||
self.engine = engine
|
logg.debug(f'caching: {data} with key: {self.metadata_pointer}')
|
||||||
self.headers = {
|
|
||||||
'X-CIC-AUTOMERGE': 'server',
|
|
||||||
'Content-Type': 'application/json'
|
|
||||||
}
|
|
||||||
self.identifier = identifier
|
|
||||||
self.metadata_pointer = generate_metadata_pointer(
|
|
||||||
identifier=self.identifier,
|
|
||||||
cic_type=self.cic_type
|
|
||||||
)
|
|
||||||
if self.base_url:
|
|
||||||
self.url = os.path.join(self.base_url, self.metadata_pointer)
|
|
||||||
|
|
||||||
def create(self, data: Union[Dict, str]):
|
|
||||||
""""""
|
|
||||||
data = json.dumps(data).encode('utf-8')
|
|
||||||
result = make_request(method='POST', url=self.url, data=data, headers=self.headers)
|
|
||||||
|
|
||||||
error_handler(result=result)
|
|
||||||
metadata = result.json()
|
|
||||||
return self.edit(data=metadata)
|
|
||||||
|
|
||||||
def edit(self, data: Union[Dict, str]):
|
|
||||||
""""""
|
|
||||||
cic_meta_signer = Signer()
|
|
||||||
signature = cic_meta_signer.sign_digest(data=data)
|
|
||||||
algorithm = cic_meta_signer.get_operational_key().get('algo')
|
|
||||||
formatted_data = {
|
|
||||||
'm': json.dumps(data),
|
|
||||||
's': {
|
|
||||||
'engine': self.engine,
|
|
||||||
'algo': algorithm,
|
|
||||||
'data': signature,
|
|
||||||
'digest': data.get('digest'),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
formatted_data = json.dumps(formatted_data)
|
|
||||||
result = make_request(method='PUT', url=self.url, data=formatted_data, headers=self.headers)
|
|
||||||
logg.info(f'signed metadata submission status: {result.status_code}.')
|
|
||||||
error_handler(result=result)
|
|
||||||
try:
|
|
||||||
decoded_identifier = self.identifier.decode("utf-8")
|
|
||||||
except UnicodeDecodeError:
|
|
||||||
decoded_identifier = self.identifier.hex()
|
|
||||||
logg.info(f'identifier: {decoded_identifier}. metadata pointer: {self.metadata_pointer} set to: {data}.')
|
|
||||||
return result
|
|
||||||
|
|
||||||
def query(self):
|
|
||||||
""""""
|
|
||||||
result = make_request(method='GET', url=self.url)
|
|
||||||
error_handler(result=result)
|
|
||||||
result_data = result.json()
|
|
||||||
if not isinstance(result_data, dict):
|
|
||||||
raise ValueError(f'Invalid result data object: {result_data}.')
|
|
||||||
if result.status_code == 200:
|
|
||||||
if self.cic_type == ':cic.person':
|
|
||||||
person = Person()
|
|
||||||
person_data = person.deserialize(person_data=result_data)
|
|
||||||
serialized_person_data = person_data.serialize()
|
|
||||||
data = json.dumps(serialized_person_data)
|
|
||||||
else:
|
|
||||||
data = json.dumps(result_data)
|
|
||||||
cache_data(key=self.metadata_pointer, data=data)
|
|
||||||
logg.debug(f'caching: {data} with key: {self.metadata_pointer}')
|
|
||||||
return result_data
|
|
||||||
|
|
||||||
def get_cached_metadata(self):
|
def get_cached_metadata(self):
|
||||||
""""""
|
""""""
|
||||||
|
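With the request plumbing moved into cic_types, the in-tree class shrinks to the caching helper shown above. A sketch of the call pattern the later task hunks rely on, assuming query() returns a requests-style response and cache_metadata() stores a JSON string under handler.metadata_pointer; the address is hypothetical.

```python
import json

from cic_types.condiments import MetadataPointer

from cic_ussd.metadata.base import UssdMetadataHandler

# hypothetical account address, for illustration only
identifier = bytes.fromhex('9d7c284907acbd4a0ce2ddaa775530150ef2b0f6')

handler = UssdMetadataHandler(MetadataPointer.PERSON, identifier)
response = handler.query()                            # HTTP round trip handled by cic_types
handler.cache_metadata(json.dumps(response.json()))   # cached under handler.metadata_pointer
```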
@ -1,12 +1,13 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from .base import MetadataRequestsHandler
|
from .base import UssdMetadataHandler
|
||||||
|
|
||||||
|
|
||||||
class CustomMetadata(MetadataRequestsHandler):
|
class CustomMetadata(UssdMetadataHandler):
|
||||||
|
|
||||||
def __init__(self, identifier: bytes):
|
def __init__(self, identifier: bytes):
|
||||||
super().__init__(cic_type=':cic.custom', identifier=identifier)
|
super().__init__(cic_type=MetadataPointer.CUSTOM, identifier=identifier)
|
||||||
|
@ -1,12 +1,13 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from .base import MetadataRequestsHandler
|
from .base import UssdMetadataHandler
|
||||||
|
|
||||||
|
|
||||||
class PersonMetadata(MetadataRequestsHandler):
|
class PersonMetadata(UssdMetadataHandler):
|
||||||
|
|
||||||
def __init__(self, identifier: bytes):
|
def __init__(self, identifier: bytes):
|
||||||
super().__init__(cic_type=':cic.person', identifier=identifier)
|
super().__init__(cic_type=MetadataPointer.PERSON, identifier=identifier)
|
||||||
|
@ -2,12 +2,13 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from .base import MetadataRequestsHandler
|
from .base import UssdMetadataHandler
|
||||||
|
|
||||||
|
|
||||||
class PhonePointerMetadata(MetadataRequestsHandler):
|
class PhonePointerMetadata(UssdMetadataHandler):
|
||||||
|
|
||||||
def __init__(self, identifier: bytes):
|
def __init__(self, identifier: bytes):
|
||||||
super().__init__(cic_type=':cic.phone', identifier=identifier)
|
super().__init__(cic_type=MetadataPointer.PHONE, identifier=identifier)
|
||||||
|
@ -1,13 +1,13 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import celery
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from .base import MetadataRequestsHandler
|
from .base import UssdMetadataHandler
|
||||||
|
|
||||||
|
|
||||||
class PreferencesMetadata(MetadataRequestsHandler):
|
class PreferencesMetadata(UssdMetadataHandler):
|
||||||
|
|
||||||
def __init__(self, identifier: bytes):
|
def __init__(self, identifier: bytes):
|
||||||
super().__init__(cic_type=':cic.preferences', identifier=identifier)
|
super().__init__(cic_type=MetadataPointer.PREFERENCES, identifier=identifier)
|
||||||
|
@ -1,60 +0,0 @@
|
|||||||
# standard imports
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
from typing import Optional
|
|
||||||
from urllib.request import Request, urlopen
|
|
||||||
|
|
||||||
# third-party imports
|
|
||||||
import gnupg
|
|
||||||
|
|
||||||
# local imports
|
|
||||||
|
|
||||||
logg = logging.getLogger()
|
|
||||||
|
|
||||||
|
|
||||||
class Signer:
|
|
||||||
"""
|
|
||||||
:cvar gpg_path:
|
|
||||||
:type gpg_path:
|
|
||||||
:cvar gpg_passphrase:
|
|
||||||
:type gpg_passphrase:
|
|
||||||
:cvar key_file_path:
|
|
||||||
:type key_file_path:
|
|
||||||
|
|
||||||
"""
|
|
||||||
gpg_path: str = None
|
|
||||||
gpg_passphrase: str = None
|
|
||||||
key_file_path: str = None
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.gpg = gnupg.GPG(gnupghome=self.gpg_path)
|
|
||||||
|
|
||||||
with open(self.key_file_path, 'r') as key_file:
|
|
||||||
self.key_data = key_file.read()
|
|
||||||
|
|
||||||
def get_operational_key(self):
|
|
||||||
"""
|
|
||||||
:return:
|
|
||||||
:rtype:
|
|
||||||
"""
|
|
||||||
# import key data into keyring
|
|
||||||
self.gpg.import_keys(key_data=self.key_data)
|
|
||||||
gpg_keys = self.gpg.list_keys()
|
|
||||||
key_algorithm = gpg_keys[0].get('algo')
|
|
||||||
key_id = gpg_keys[0].get("keyid")
|
|
||||||
logg.debug(f'using signing key: {key_id}, algorithm: {key_algorithm}')
|
|
||||||
return gpg_keys[0]
|
|
||||||
|
|
||||||
def sign_digest(self, data: dict):
|
|
||||||
"""
|
|
||||||
:param data:
|
|
||||||
:type data:
|
|
||||||
:return:
|
|
||||||
:rtype:
|
|
||||||
"""
|
|
||||||
digest = data['digest']
|
|
||||||
key_id = self.get_operational_key().get('keyid')
|
|
||||||
signature = self.gpg.sign(digest, passphrase=self.gpg_passphrase, keyid=key_id)
|
|
||||||
return str(signature)
|
|
||||||
|
|
||||||
|
|
@ -5,6 +5,7 @@ from datetime import datetime, timedelta
|
|||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import i18n.config
|
import i18n.config
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.balance import (calculate_available_balance,
|
from cic_ussd.account.balance import (calculate_available_balance,
|
||||||
@ -163,7 +164,7 @@ class MenuProcessor:
|
|||||||
token_symbol = get_default_token_symbol()
|
token_symbol = get_default_token_symbol()
|
||||||
blockchain_address = self.account.blockchain_address
|
blockchain_address = self.account.blockchain_address
|
||||||
balances = get_balances(blockchain_address, chain_str, token_symbol, False)[0]
|
balances = get_balances(blockchain_address, chain_str, token_symbol, False)[0]
|
||||||
key = cache_data_key(self.identifier, ':cic.balances')
|
key = cache_data_key(self.identifier, MetadataPointer.BALANCES)
|
||||||
cache_data(key, json.dumps(balances))
|
cache_data(key, json.dumps(balances))
|
||||||
available_balance = calculate_available_balance(balances)
|
available_balance = calculate_available_balance(balances)
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
@ -173,7 +174,7 @@ class MenuProcessor:
|
|||||||
else:
|
else:
|
||||||
timestamp = int((now - timedelta(30)).timestamp())
|
timestamp = int((now - timedelta(30)).timestamp())
|
||||||
adjusted_balance = get_adjusted_balance(to_wei(int(available_balance)), chain_str, timestamp, token_symbol)
|
adjusted_balance = get_adjusted_balance(to_wei(int(available_balance)), chain_str, timestamp, token_symbol)
|
||||||
key = cache_data_key(self.identifier, ':cic.adjusted_balance')
|
key = cache_data_key(self.identifier, MetadataPointer.BALANCES_ADJUSTED)
|
||||||
cache_data(key, json.dumps(adjusted_balance))
|
cache_data(key, json.dumps(adjusted_balance))
|
||||||
|
|
||||||
query_statement(blockchain_address)
|
query_statement(blockchain_address)
|
||||||
|
@ -10,14 +10,14 @@ import i18n
|
|||||||
import redis
|
import redis
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
from confini import Config
|
from confini import Config
|
||||||
|
from cic_types.ext.metadata import Metadata
|
||||||
|
from cic_types.ext.metadata.signer import Signer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.chain import Chain
|
from cic_ussd.account.chain import Chain
|
||||||
from cic_ussd.cache import Cache
|
from cic_ussd.cache import Cache
|
||||||
from cic_ussd.db import dsn_from_config
|
from cic_ussd.db import dsn_from_config
|
||||||
from cic_ussd.db.models.base import SessionBase
|
from cic_ussd.db.models.base import SessionBase
|
||||||
from cic_ussd.metadata.signer import Signer
|
|
||||||
from cic_ussd.metadata.base import Metadata
|
|
||||||
from cic_ussd.phone_number import Support
|
from cic_ussd.phone_number import Support
|
||||||
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
|
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
|
||||||
from cic_ussd.validator import validate_presence
|
from cic_ussd.validator import validate_presence
|
||||||
@ -87,11 +87,8 @@ Signer.key_file_path = key_file_path
|
|||||||
i18n.load_path.append(config.get('LOCALE_PATH'))
|
i18n.load_path.append(config.get('LOCALE_PATH'))
|
||||||
i18n.set('fallback', config.get('LOCALE_FALLBACK'))
|
i18n.set('fallback', config.get('LOCALE_FALLBACK'))
|
||||||
|
|
||||||
chain_spec = ChainSpec(
|
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
|
||||||
common_name=config.get('CIC_COMMON_NAME'),
|
|
||||||
engine=config.get('CIC_ENGINE'),
|
|
||||||
network_id=config.get('CIC_NETWORK_ID')
|
|
||||||
)
|
|
||||||
|
|
||||||
Chain.spec = chain_spec
|
Chain.spec = chain_spec
|
||||||
Support.phone_number = config.get('OFFICE_SUPPORT_PHONE')
|
Support.phone_number = config.get('OFFICE_SUPPORT_PHONE')
|
||||||
|
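The three separate CIC_ENGINE / CIC_COMMON_NAME / CIC_NETWORK_ID settings give way to a single CHAIN_SPEC string parsed by chainlib. A sketch, using the placeholder spec value from the config/test/chain.ini added later in this commit; a real deployment would supply its own chain string.

```python
from chainlib.chain import ChainSpec

# 'evm:foo:1:bar' is the placeholder from config/test/chain.ini, shown here
# only to illustrate the colon-separated format from_chain_str expects
chain_spec = ChainSpec.from_chain_str('evm:foo:1:bar')
```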
@ -12,6 +12,9 @@ import i18n
|
|||||||
import redis
|
import redis
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
from confini import Config
|
from confini import Config
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
from cic_types.ext.metadata import Metadata
|
||||||
|
from cic_types.ext.metadata.signer import Signer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.chain import Chain
|
from cic_ussd.account.chain import Chain
|
||||||
@ -25,8 +28,6 @@ from cic_ussd.files.local_files import create_local_file_data_stores, json_file_
|
|||||||
from cic_ussd.http.requests import get_request_endpoint, get_request_method
|
from cic_ussd.http.requests import get_request_endpoint, get_request_method
|
||||||
from cic_ussd.http.responses import with_content_headers
|
from cic_ussd.http.responses import with_content_headers
|
||||||
from cic_ussd.menu.ussd_menu import UssdMenu
|
from cic_ussd.menu.ussd_menu import UssdMenu
|
||||||
from cic_ussd.metadata.base import Metadata
|
|
||||||
from cic_ussd.metadata.signer import Signer
|
|
||||||
from cic_ussd.phone_number import process_phone_number, Support, E164Format
|
from cic_ussd.phone_number import process_phone_number, Support, E164Format
|
||||||
from cic_ussd.processor.ussd import handle_menu_operations
|
from cic_ussd.processor.ussd import handle_menu_operations
|
||||||
from cic_ussd.runnable.server_base import exportable_parser, logg
|
from cic_ussd.runnable.server_base import exportable_parser, logg
|
||||||
@ -96,11 +97,7 @@ celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY
|
|||||||
states = json_file_parser(filepath=config.get('MACHINE_STATES'))
|
states = json_file_parser(filepath=config.get('MACHINE_STATES'))
|
||||||
transitions = json_file_parser(filepath=config.get('MACHINE_TRANSITIONS'))
|
transitions = json_file_parser(filepath=config.get('MACHINE_TRANSITIONS'))
|
||||||
|
|
||||||
chain_spec = ChainSpec(
|
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
|
||||||
common_name=config.get('CIC_COMMON_NAME'),
|
|
||||||
engine=config.get('CIC_ENGINE'),
|
|
||||||
network_id=config.get('CIC_NETWORK_ID')
|
|
||||||
)
|
|
||||||
|
|
||||||
Chain.spec = chain_spec
|
Chain.spec = chain_spec
|
||||||
UssdStateMachine.states = states
|
UssdStateMachine.states = states
|
||||||
@ -113,7 +110,7 @@ default_token_data = query_default_token(chain_str)
|
|||||||
|
|
||||||
# cache default token for re-usability
|
# cache default token for re-usability
|
||||||
if default_token_data:
|
if default_token_data:
|
||||||
cache_key = cache_data_key(chain_str.encode('utf-8'), ':cic.default_token_data')
|
cache_key = cache_data_key(chain_str.encode('utf-8'), MetadataPointer.TOKEN_DEFAULT)
|
||||||
cache_data(key=cache_key, data=json.dumps(default_token_data))
|
cache_data(key=cache_key, data=json.dumps(default_token_data))
|
||||||
else:
|
else:
|
||||||
raise InitializationError(f'Default token data for: {chain_str} not found.')
|
raise InitializationError(f'Default token data for: {chain_str} not found.')
|
||||||
|
@ -3,8 +3,10 @@ import json
|
|||||||
import logging
|
import logging
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.balance import get_balances, calculate_available_balance
|
from cic_ussd.account.balance import get_balances, calculate_available_balance
|
||||||
@ -87,7 +89,7 @@ def balances_callback(result: list, param: str, status_code: int):
|
|||||||
|
|
||||||
balances = result[0]
|
balances = result[0]
|
||||||
identifier = bytes.fromhex(param)
|
identifier = bytes.fromhex(param)
|
||||||
key = cache_data_key(identifier, ':cic.balances')
|
key = cache_data_key(identifier, MetadataPointer.BALANCES)
|
||||||
cache_data(key, json.dumps(balances))
|
cache_data(key, json.dumps(balances))
|
||||||
|
|
||||||
|
|
||||||
|
@ -1,15 +1,17 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# third-party imports
|
# third-party imports
|
||||||
import celery
|
import celery
|
||||||
|
from cic_types.models.person import Person
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.metadata import CustomMetadata, PersonMetadata, PhonePointerMetadata, PreferencesMetadata
|
from cic_ussd.metadata import CustomMetadata, PersonMetadata, PhonePointerMetadata, PreferencesMetadata
|
||||||
from cic_ussd.tasks.base import CriticalMetadataTask
|
from cic_ussd.tasks.base import CriticalMetadataTask
|
||||||
|
|
||||||
celery_app = celery.current_app
|
celery_app = celery.current_app
|
||||||
logg = logging.getLogger().getChild(__name__)
|
logg = logging.getLogger(__file__)
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task
|
@celery_app.task
|
||||||
@ -22,7 +24,13 @@ def query_person_metadata(blockchain_address: str):
|
|||||||
"""
|
"""
|
||||||
identifier = bytes.fromhex(blockchain_address)
|
identifier = bytes.fromhex(blockchain_address)
|
||||||
person_metadata_client = PersonMetadata(identifier=identifier)
|
person_metadata_client = PersonMetadata(identifier=identifier)
|
||||||
person_metadata_client.query()
|
response = person_metadata_client.query()
|
||||||
|
data = response.json()
|
||||||
|
person = Person()
|
||||||
|
person_data = person.deserialize(person_data=data)
|
||||||
|
serialized_person_data = person_data.serialize()
|
||||||
|
data = json.dumps(serialized_person_data)
|
||||||
|
person_metadata_client.cache_metadata(data=data)
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task
|
@celery_app.task
|
||||||
@ -76,6 +84,9 @@ def query_preferences_metadata(blockchain_address: str):
|
|||||||
:type blockchain_address: str | 0x-hex
|
:type blockchain_address: str | 0x-hex
|
||||||
"""
|
"""
|
||||||
identifier = bytes.fromhex(blockchain_address)
|
identifier = bytes.fromhex(blockchain_address)
|
||||||
logg.debug(f'Retrieving preferences metadata for address: {blockchain_address}.')
|
logg.debug(f'retrieving preferences metadata for address: {blockchain_address}.')
|
||||||
person_metadata_client = PreferencesMetadata(identifier=identifier)
|
preferences_metadata_client = PreferencesMetadata(identifier=identifier)
|
||||||
return person_metadata_client.query()
|
response = preferences_metadata_client.query()
|
||||||
|
data = json.dumps(response.json())
|
||||||
|
preferences_metadata_client.cache_metadata(data)
|
||||||
|
return data
|
||||||
|
@ -2,9 +2,10 @@
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
import i18n
|
import i18n
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.metadata import get_cached_preferred_language
|
from cic_ussd.account.metadata import get_cached_preferred_language
|
||||||
@ -49,7 +50,7 @@ def cache_statement(parsed_transaction: dict, querying_party: str):
|
|||||||
statement_transactions.append(parsed_transaction)
|
statement_transactions.append(parsed_transaction)
|
||||||
data = json.dumps(statement_transactions)
|
data = json.dumps(statement_transactions)
|
||||||
identifier = bytes.fromhex(querying_party)
|
identifier = bytes.fromhex(querying_party)
|
||||||
key = cache_data_key(identifier, ':cic.statement')
|
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
|
||||||
cache_data(key, data)
|
cache_data(key, data)
|
||||||
|
|
||||||
|
|
||||||
|
2
apps/cic-ussd/config/chain.ini
Normal file
2
apps/cic-ussd/config/chain.ini
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
[chain]
|
||||||
|
spec =
|
@ -1,5 +1,2 @@
|
|||||||
[cic]
|
[cic]
|
||||||
engine = evm
|
|
||||||
common_name = bloxberg
|
|
||||||
network_id = 8996
|
|
||||||
meta_url = http://localhost:63380
|
meta_url = http://localhost:63380
|
||||||
|
2
apps/cic-ussd/config/test/chain.ini
Normal file
2
apps/cic-ussd/config/test/chain.ini
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
[chain]
|
||||||
|
spec = 'evm:foo:1:bar'
|
@ -1,5 +1,2 @@
|
|||||||
[cic]
|
[cic]
|
||||||
engine = evm
|
|
||||||
common_name = bloxberg
|
|
||||||
network_id = 8996
|
|
||||||
meta_url = http://test-meta.io
|
meta_url = http://test-meta.io
|
||||||
|
@ -1,6 +1,8 @@
|
|||||||
# syntax = docker/dockerfile:1.2
|
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
|
||||||
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
|
||||||
RUN apt-get install -y redis-server
|
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e
|
||||||
|
|
||||||
|
RUN apt-get install redis-server libffi-dev -y
|
||||||
# create secrets directory
|
# create secrets directory
|
||||||
RUN mkdir -vp pgp/keys
|
RUN mkdir -vp pgp/keys
|
||||||
|
|
||||||
@ -8,28 +10,27 @@ RUN mkdir -vp pgp/keys
|
|||||||
RUN mkdir -vp cic-ussd
|
RUN mkdir -vp cic-ussd
|
||||||
RUN mkdir -vp data
|
RUN mkdir -vp data
|
||||||
|
|
||||||
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
|
||||||
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
ARG EXTRA_PIP_ARGS=""
|
||||||
|
ARG PIP_INDEX_URL=https://pypi.org/simple
|
||||||
|
|
||||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
RUN pip install --index-url $PIP_INDEX_URL \
|
||||||
pip install --index-url https://pypi.org/simple \
|
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||||
--extra-index-url $GITLAB_PYTHON_REGISTRY \
|
cic-eth-aux-erc20-demurrage-token~=0.0.2a7
|
||||||
--extra-index-url $EXTRA_INDEX_URL \
|
|
||||||
cic-eth-aux-erc20-demurrage-token~=0.0.2a6
|
|
||||||
|
|
||||||
COPY requirements.txt .
|
|
||||||
|
|
||||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
COPY *requirements.txt ./
|
||||||
pip install --index-url https://pypi.org/simple \
|
RUN pip install --index-url $PIP_INDEX_URL \
|
||||||
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||||
-r requirements.txt
|
-r requirements.txt
|
||||||
|
|
||||||
|
|
||||||
COPY . .
|
COPY . .
|
||||||
RUN python setup.py install
|
RUN python setup.py install
|
||||||
|
|
||||||
COPY cic_ussd/db/ussd_menu.json data/
|
COPY cic_ussd/db/ussd_menu.json data/
|
||||||
|
|
||||||
COPY docker/*.sh .
|
COPY docker/*.sh ./
|
||||||
RUN chmod +x /root/*.sh
|
RUN chmod +x /root/*.sh
|
||||||
|
|
||||||
# copy config and migration files to definitive file so they can be referenced in path definitions for running scripts
|
# copy config and migration files to definitive file so they can be referenced in path definitions for running scripts
|
||||||
|
@ -4,10 +4,10 @@ billiard==3.6.4.0
|
|||||||
bcrypt==3.2.0
|
bcrypt==3.2.0
|
||||||
celery==4.4.7
|
celery==4.4.7
|
||||||
cffi==1.14.6
|
cffi==1.14.6
|
||||||
cic-eth[services]~=0.12.4a7
|
cic-eth~=0.12.4a13
|
||||||
cic-notify~=0.4.0a10
|
cic-notify~=0.4.0a10
|
||||||
cic-types~=0.1.0a15
|
cic-types~=0.2.0a6
|
||||||
confini>=0.4.1a1,<0.5.0
|
confini>=0.3.6rc4,<0.5.0
|
||||||
phonenumbers==8.12.12
|
phonenumbers==8.12.12
|
||||||
psycopg2==2.8.6
|
psycopg2==2.8.6
|
||||||
python-i18n[YAML]==0.3.9
|
python-i18n[YAML]==0.3.9
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
cic-eth[services]~=0.12.4a13
|
||||||
Faker==8.1.2
|
Faker==8.1.2
|
||||||
faker-e164==0.1.0
|
faker-e164==0.1.0
|
||||||
pytest==6.2.4
|
pytest==6.2.4
|
||||||
|
@ -4,8 +4,7 @@ import time
|
|||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import pytest
|
import pytest
|
||||||
import requests_mock
|
from cic_types.condiments import MetadataPointer
|
||||||
from chainlib.hash import strip_0x
|
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.statement import (filter_statement_transactions,
|
from cic_ussd.account.statement import (filter_statement_transactions,
|
||||||
@ -48,7 +47,7 @@ def test_generate(activated_account,
|
|||||||
generate(querying_party, None, sender_transaction)
|
generate(querying_party, None, sender_transaction)
|
||||||
time.sleep(2)
|
time.sleep(2)
|
||||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||||
key = cache_data_key(identifier, ':cic.statement')
|
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
|
||||||
statement = get_cached_data(key)
|
statement = get_cached_data(key)
|
||||||
statement = json.loads(statement)
|
statement = json.loads(statement)
|
||||||
assert len(statement) == 1
|
assert len(statement) == 1
|
||||||
|
@ -5,24 +5,25 @@ import os
|
|||||||
# external imports
|
# external imports
|
||||||
import requests_mock
|
import requests_mock
|
||||||
from chainlib.hash import strip_0x
|
from chainlib.hash import strip_0x
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
from cic_types.processor import generate_metadata_pointer
|
from cic_types.processor import generate_metadata_pointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.metadata.base import MetadataRequestsHandler
|
from cic_ussd.metadata.base import UssdMetadataHandler
|
||||||
|
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
|
|
||||||
|
|
||||||
def test_metadata_requests_handler(activated_account,
|
def test_ussd_metadata_handler(activated_account,
|
||||||
init_cache,
|
init_cache,
|
||||||
load_config,
|
load_config,
|
||||||
person_metadata,
|
person_metadata,
|
||||||
setup_metadata_request_handler,
|
setup_metadata_request_handler,
|
||||||
setup_metadata_signer):
|
setup_metadata_signer):
|
||||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
||||||
cic_type = ':cic.person'
|
cic_type = MetadataPointer.PERSON
|
||||||
metadata_client = MetadataRequestsHandler(cic_type, identifier)
|
metadata_client = UssdMetadataHandler(cic_type, identifier)
|
||||||
assert metadata_client.cic_type == cic_type
|
assert metadata_client.cic_type == cic_type
|
||||||
assert metadata_client.engine == 'pgp'
|
assert metadata_client.engine == 'pgp'
|
||||||
assert metadata_client.identifier == identifier
|
assert metadata_client.identifier == identifier
|
||||||
@ -38,7 +39,5 @@ def test_metadata_requests_handler(activated_account,
|
|||||||
assert result.status_code == 200
|
assert result.status_code == 200
|
||||||
person_metadata.pop('digest')
|
person_metadata.pop('digest')
|
||||||
request_mocker.register_uri('GET', metadata_client.url, status_code=200, reason='OK', json=person_metadata)
|
request_mocker.register_uri('GET', metadata_client.url, status_code=200, reason='OK', json=person_metadata)
|
||||||
result = metadata_client.query()
|
result = metadata_client.query().json()
|
||||||
assert result == person_metadata
|
assert result == person_metadata
|
||||||
cached_metadata = metadata_client.get_cached_metadata()
|
|
||||||
assert json.loads(cached_metadata) == person_metadata
|
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
import os
|
import os
|
||||||
# external imports
|
# external imports
|
||||||
from chainlib.hash import strip_0x
|
from cic_types.condiments import MetadataPointer
|
||||||
from cic_types.processor import generate_metadata_pointer
|
from cic_types.processor import generate_metadata_pointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
@ -11,8 +11,8 @@ from cic_ussd.metadata import CustomMetadata
|
|||||||
|
|
||||||
|
|
||||||
def test_custom_metadata(activated_account, load_config, setup_metadata_request_handler, setup_metadata_signer):
|
def test_custom_metadata(activated_account, load_config, setup_metadata_request_handler, setup_metadata_signer):
|
||||||
cic_type = ':cic.custom'
|
cic_type = MetadataPointer.CUSTOM
|
||||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||||
custom_metadata_client = CustomMetadata(identifier)
|
custom_metadata_client = CustomMetadata(identifier)
|
||||||
assert custom_metadata_client.cic_type == cic_type
|
assert custom_metadata_client.cic_type == cic_type
|
||||||
assert custom_metadata_client.engine == 'pgp'
|
assert custom_metadata_client.engine == 'pgp'
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
import os
|
import os
|
||||||
# external imports
|
# external imports
|
||||||
from chainlib.hash import strip_0x
|
from cic_types.condiments import MetadataPointer
|
||||||
from cic_types.processor import generate_metadata_pointer
|
from cic_types.processor import generate_metadata_pointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
@ -11,8 +11,8 @@ from cic_ussd.metadata import PersonMetadata
|
|||||||
|
|
||||||
|
|
||||||
def test_person_metadata(activated_account, load_config, setup_metadata_request_handler, setup_metadata_signer):
|
def test_person_metadata(activated_account, load_config, setup_metadata_request_handler, setup_metadata_signer):
|
||||||
cic_type = ':cic.person'
|
cic_type = MetadataPointer.PERSON
|
||||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||||
person_metadata_client = PersonMetadata(identifier)
|
person_metadata_client = PersonMetadata(identifier)
|
||||||
assert person_metadata_client.cic_type == cic_type
|
assert person_metadata_client.cic_type == cic_type
|
||||||
assert person_metadata_client.engine == 'pgp'
|
assert person_metadata_client.engine == 'pgp'
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
import os
|
import os
|
||||||
# external imports
|
# external imports
|
||||||
from chainlib.hash import strip_0x
|
from cic_types.condiments import MetadataPointer
|
||||||
from cic_types.processor import generate_metadata_pointer
|
from cic_types.processor import generate_metadata_pointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
@ -12,8 +12,8 @@ from cic_ussd.metadata import PhonePointerMetadata
|
|||||||
|
|
||||||
|
|
||||||
def test_phone_pointer_metadata(activated_account, load_config, setup_metadata_request_handler, setup_metadata_signer):
|
def test_phone_pointer_metadata(activated_account, load_config, setup_metadata_request_handler, setup_metadata_signer):
|
||||||
cic_type = ':cic.phone'
|
cic_type = MetadataPointer.PHONE
|
||||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||||
phone_pointer_metadata = PhonePointerMetadata(identifier)
|
phone_pointer_metadata = PhonePointerMetadata(identifier)
|
||||||
assert phone_pointer_metadata.cic_type == cic_type
|
assert phone_pointer_metadata.cic_type == cic_type
|
||||||
assert phone_pointer_metadata.engine == 'pgp'
|
assert phone_pointer_metadata.engine == 'pgp'
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
import os
|
import os
|
||||||
# external imports
|
# external imports
|
||||||
from chainlib.hash import strip_0x
|
from cic_types.condiments import MetadataPointer
|
||||||
from cic_types.processor import generate_metadata_pointer
|
from cic_types.processor import generate_metadata_pointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
@ -11,8 +11,8 @@ from cic_ussd.metadata import PreferencesMetadata
|
|||||||
|
|
||||||
|
|
||||||
def test_preferences_metadata(activated_account, load_config, setup_metadata_request_handler, setup_metadata_signer):
|
def test_preferences_metadata(activated_account, load_config, setup_metadata_request_handler, setup_metadata_signer):
|
||||||
cic_type = ':cic.preferences'
|
cic_type = MetadataPointer.PREFERENCES
|
||||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||||
preferences_metadata_client = PreferencesMetadata(identifier)
|
preferences_metadata_client = PreferencesMetadata(identifier)
|
||||||
assert preferences_metadata_client.cic_type == cic_type
|
assert preferences_metadata_client.cic_type == cic_type
|
||||||
assert preferences_metadata_client.engine == 'pgp'
|
assert preferences_metadata_client.engine == 'pgp'
|
||||||
|
@ -1,17 +0,0 @@
|
|||||||
# standard imports
|
|
||||||
import shutil
|
|
||||||
|
|
||||||
# third-party imports
|
|
||||||
|
|
||||||
# local imports
|
|
||||||
from cic_ussd.metadata.signer import Signer
|
|
||||||
|
|
||||||
|
|
||||||
def test_client(load_config, setup_metadata_signer, person_metadata):
|
|
||||||
signer = Signer()
|
|
||||||
gpg = signer.gpg
|
|
||||||
assert signer.key_data is not None
|
|
||||||
gpg.import_keys(key_data=signer.key_data)
|
|
||||||
gpg_keys = gpg.list_keys()
|
|
||||||
assert signer.get_operational_key() == gpg_keys[0]
|
|
||||||
shutil.rmtree(Signer.gpg_path)
|
|
@ -3,7 +3,7 @@ import json
|
|||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
from chainlib.hash import strip_0x
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.balance import get_cached_available_balance
|
from cic_ussd.account.balance import get_cached_available_balance
|
||||||
@ -58,7 +58,7 @@ def test_menu_processor(activated_account,
|
|||||||
token_symbol=token_symbol)
|
token_symbol=token_symbol)
|
||||||
|
|
||||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||||
key = cache_data_key(identifier, ':cic.adjusted_balance')
|
key = cache_data_key(identifier, MetadataPointer.BALANCES_ADJUSTED)
|
||||||
adjusted_balance = 45931650.64654012
|
adjusted_balance = 45931650.64654012
|
||||||
cache_data(key, json.dumps(adjusted_balance))
|
cache_data(key, json.dumps(adjusted_balance))
|
||||||
resp = response(activated_account, 'ussd.kenya.account_balances', name, init_database, generic_ussd_session)
|
resp = response(activated_account, 'ussd.kenya.account_balances', name, init_database, generic_ussd_session)
|
||||||
|
@ -7,6 +7,7 @@ import time
|
|||||||
import i18n
|
import i18n
|
||||||
import requests_mock
|
import requests_mock
|
||||||
from chainlib.hash import strip_0x
|
from chainlib.hash import strip_0x
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.chain import Chain
|
from cic_ussd.account.chain import Chain
|
||||||
@ -45,7 +46,7 @@ def test_handle_menu(activated_account,
|
|||||||
ussd_menu = UssdMenu.find_by_name('initial_language_selection')
|
ussd_menu = UssdMenu.find_by_name('initial_language_selection')
|
||||||
assert menu_resp.get('name') == ussd_menu.get('name')
|
assert menu_resp.get('name') == ussd_menu.get('name')
|
||||||
identifier = bytes.fromhex(strip_0x(pending_account.blockchain_address))
|
identifier = bytes.fromhex(strip_0x(pending_account.blockchain_address))
|
||||||
key = cache_data_key(identifier, ':cic.preferences')
|
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
|
||||||
cache_data(key, json.dumps(preferences))
|
cache_data(key, json.dumps(preferences))
|
||||||
time.sleep(2)
|
time.sleep(2)
|
||||||
menu_resp = handle_menu(pending_account, init_database)
|
menu_resp = handle_menu(pending_account, init_database)
|
||||||
|
@ -1,20 +1,18 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
import json
|
import json
|
||||||
from decimal import Decimal
|
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
import pytest
|
import pytest
|
||||||
import requests_mock
|
|
||||||
from chainlib.hash import strip_0x
|
from chainlib.hash import strip_0x
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.statement import generate, filter_statement_transactions
|
from cic_ussd.account.statement import filter_statement_transactions
|
||||||
from cic_ussd.account.transaction import transaction_actors
|
from cic_ussd.account.transaction import transaction_actors
|
||||||
from cic_ussd.cache import cache_data_key, get_cached_data
|
from cic_ussd.cache import cache_data_key, get_cached_data
|
||||||
from cic_ussd.db.models.account import Account
|
from cic_ussd.db.models.account import Account
|
||||||
from cic_ussd.error import AccountCreationDataNotFound
|
from cic_ussd.error import AccountCreationDataNotFound
|
||||||
from cic_ussd.metadata import PreferencesMetadata
|
|
||||||
|
|
||||||
|
|
||||||
# test imports
|
# test imports
|
||||||
@ -89,7 +87,7 @@ def test_balances_callback(activated_account, balances, celery_session_worker):
|
|||||||
[balances, activated_account.blockchain_address, status_code])
|
[balances, activated_account.blockchain_address, status_code])
|
||||||
s_balances_callback.apply_async().get()
|
s_balances_callback.apply_async().get()
|
||||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
||||||
key = cache_data_key(identifier, ':cic.balances')
|
key = cache_data_key(identifier, MetadataPointer.BALANCES)
|
||||||
cached_balances = get_cached_data(key)
|
cached_balances = get_cached_data(key)
|
||||||
cached_balances = json.loads(cached_balances)
|
cached_balances = json.loads(cached_balances)
|
||||||
assert cached_balances == balances[0]
|
assert cached_balances == balances[0]
|
||||||
|
@ -1,11 +1,11 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
import json
|
import json
|
||||||
import os
|
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
import requests_mock
|
import requests_mock
|
||||||
from chainlib.hash import strip_0x
|
from chainlib.hash import strip_0x
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.cache import cache_data_key, get_cached_data
|
from cic_ussd.cache import cache_data_key, get_cached_data
|
||||||
@ -27,7 +27,7 @@ def test_query_person_metadata(activated_account,
|
|||||||
s_query_person_metadata = celery.signature(
|
s_query_person_metadata = celery.signature(
|
||||||
'cic_ussd.tasks.metadata.query_person_metadata', [activated_account.blockchain_address])
|
'cic_ussd.tasks.metadata.query_person_metadata', [activated_account.blockchain_address])
|
||||||
s_query_person_metadata.apply().get()
|
s_query_person_metadata.apply().get()
|
||||||
key = cache_data_key(identifier, ':cic.person')
|
key = cache_data_key(identifier, MetadataPointer.PERSON)
|
||||||
cached_person_metadata = get_cached_data(key)
|
cached_person_metadata = get_cached_data(key)
|
||||||
cached_person_metadata = json.loads(cached_person_metadata)
|
cached_person_metadata = json.loads(cached_person_metadata)
|
||||||
assert cached_person_metadata == person_metadata
|
assert cached_person_metadata == person_metadata
|
||||||
@ -46,7 +46,7 @@ def test_query_preferences_metadata(activated_account,
|
|||||||
query_preferences_metadata = celery.signature(
|
query_preferences_metadata = celery.signature(
|
||||||
'cic_ussd.tasks.metadata.query_preferences_metadata', [activated_account.blockchain_address])
|
'cic_ussd.tasks.metadata.query_preferences_metadata', [activated_account.blockchain_address])
|
||||||
query_preferences_metadata.apply().get()
|
query_preferences_metadata.apply().get()
|
||||||
key = cache_data_key(identifier, ':cic.preferences')
|
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
|
||||||
cached_preferences_metadata = get_cached_data(key)
|
cached_preferences_metadata = get_cached_data(key)
|
||||||
cached_preferences_metadata = json.loads(cached_preferences_metadata)
|
cached_preferences_metadata = json.loads(cached_preferences_metadata)
|
||||||
assert cached_preferences_metadata == preferences
|
assert cached_preferences_metadata == preferences
|
||||||
|
@ -4,6 +4,7 @@ import json
|
|||||||
# external imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
from chainlib.hash import strip_0x
|
from chainlib.hash import strip_0x
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.transaction import transaction_actors
|
from cic_ussd.account.transaction import transaction_actors
|
||||||
@ -38,7 +39,7 @@ def test_cache_statement(activated_account,
|
|||||||
transaction_result):
|
transaction_result):
|
||||||
recipient_transaction, sender_transaction = transaction_actors(transaction_result)
|
recipient_transaction, sender_transaction = transaction_actors(transaction_result)
|
||||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
||||||
key = cache_data_key(identifier, ':cic.statement')
|
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
|
||||||
cached_statement = get_cached_data(key)
|
cached_statement = get_cached_data(key)
|
||||||
assert cached_statement is None
|
assert cached_statement is None
|
||||||
s_parse_transaction = celery.signature(
|
s_parse_transaction = celery.signature(
|
||||||
|
@ -3,6 +3,7 @@ import hashlib
|
|||||||
import json
|
import json
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
|
from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
|
||||||
@ -12,7 +13,7 @@ from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
|
|||||||
|
|
||||||
def test_cache_data(init_cache):
|
def test_cache_data(init_cache):
|
||||||
identifier = 'some_key'.encode()
|
identifier = 'some_key'.encode()
|
||||||
key = cache_data_key(identifier, ':testing')
|
key = cache_data_key(identifier, MetadataPointer.PERSON)
|
||||||
assert get_cached_data(key) is None
|
assert get_cached_data(key) is None
|
||||||
cache_data(key, json.dumps('some_value'))
|
cache_data(key, json.dumps('some_value'))
|
||||||
assert get_cached_data(key) is not None
|
assert get_cached_data(key) is not None
|
||||||
@ -20,10 +21,10 @@ def test_cache_data(init_cache):
|
|||||||
|
|
||||||
def test_cache_data_key():
|
def test_cache_data_key():
|
||||||
identifier = 'some_key'.encode()
|
identifier = 'some_key'.encode()
|
||||||
key = cache_data_key(identifier, ':testing')
|
key = cache_data_key(identifier, MetadataPointer.PERSON)
|
||||||
hash_object = hashlib.new("sha256")
|
hash_object = hashlib.new("sha256")
|
||||||
hash_object.update(identifier)
|
hash_object.update(identifier)
|
||||||
hash_object.update(':testing'.encode(encoding="utf-8"))
|
hash_object.update(':cic.person'.encode(encoding="utf-8"))
|
||||||
assert hash_object.digest().hex() == key
|
assert hash_object.digest().hex() == key
|
||||||
|
|
||||||
|
|
||||||
|
12
apps/cic-ussd/tests/fixtures/account.py
vendored
12
apps/cic-ussd/tests/fixtures/account.py
vendored
@ -4,7 +4,7 @@ import random
|
|||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import pytest
|
import pytest
|
||||||
from chainlib.hash import strip_0x
|
from cic_types.condiments import MetadataPointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.account.chain import Chain
|
from cic_ussd.account.chain import Chain
|
||||||
@ -56,7 +56,7 @@ def cache_account_creation_data(init_cache, account_creation_data):
|
|||||||
def cache_balances(activated_account, balances, init_cache):
|
def cache_balances(activated_account, balances, init_cache):
|
||||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||||
balances = json.dumps(balances[0])
|
balances = json.dumps(balances[0])
|
||||||
key = cache_data_key(identifier, ':cic.balances')
|
key = cache_data_key(identifier, MetadataPointer.BALANCES)
|
||||||
cache_data(key, balances)
|
cache_data(key, balances)
|
||||||
|
|
||||||
|
|
||||||
@ -64,7 +64,7 @@ def cache_balances(activated_account, balances, init_cache):
|
|||||||
def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
|
def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
|
||||||
chain_str = Chain.spec.__str__()
|
chain_str = Chain.spec.__str__()
|
||||||
data = json.dumps(default_token_data)
|
data = json.dumps(default_token_data)
|
||||||
key = cache_data_key(chain_str.encode('utf-8'), ':cic.default_token_data')
|
key = cache_data_key(chain_str.encode('utf-8'), MetadataPointer.TOKEN_DEFAULT)
|
||||||
cache_data(key, data)
|
cache_data(key, data)
|
||||||
|
|
||||||
|
|
||||||
@ -72,7 +72,7 @@ def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
|
|||||||
def cache_person_metadata(activated_account, init_cache, person_metadata):
|
def cache_person_metadata(activated_account, init_cache, person_metadata):
|
||||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||||
person = json.dumps(person_metadata)
|
person = json.dumps(person_metadata)
|
||||||
key = cache_data_key(identifier, ':cic.person')
|
key = cache_data_key(identifier, MetadataPointer.PERSON)
|
||||||
cache_data(key, person)
|
cache_data(key, person)
|
||||||
|
|
||||||
|
|
||||||
@ -80,7 +80,7 @@ def cache_person_metadata(activated_account, init_cache, person_metadata):
|
|||||||
def cache_preferences(activated_account, init_cache, preferences):
|
def cache_preferences(activated_account, init_cache, preferences):
|
||||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||||
preferences = json.dumps(preferences)
|
preferences = json.dumps(preferences)
|
||||||
key = cache_data_key(identifier, ':cic.preferences')
|
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
|
||||||
cache_data(key, preferences)
|
cache_data(key, preferences)
|
||||||
|
|
||||||
|
|
||||||
@ -88,7 +88,7 @@ def cache_preferences(activated_account, init_cache, preferences):
|
|||||||
def cache_statement(activated_account, init_cache, statement):
|
def cache_statement(activated_account, init_cache, statement):
|
||||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||||
statement = json.dumps(statement)
|
statement = json.dumps(statement)
|
||||||
key = cache_data_key(identifier, ':cic.statement')
|
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
|
||||||
cache_data(key, statement)
|
cache_data(key, statement)
|
||||||
|
|
||||||
|
|
||||||
|
6
apps/cic-ussd/tests/fixtures/config.py
vendored
6
apps/cic-ussd/tests/fixtures/config.py
vendored
@ -41,11 +41,7 @@ def init_state_machine(load_config):
|
|||||||
|
|
||||||
@pytest.fixture(scope='function')
|
@pytest.fixture(scope='function')
|
||||||
def load_chain_spec(load_config):
|
def load_chain_spec(load_config):
|
||||||
chain_spec = ChainSpec(
|
chain_spec = ChainSpec.from_chain_str(load_config.get('CHAIN_SPEC'))
|
||||||
common_name=load_config.get('CIC_COMMON_NAME'),
|
|
||||||
engine=load_config.get('CIC_ENGINE'),
|
|
||||||
network_id=load_config.get('CIC_NETWORK_ID')
|
|
||||||
)
|
|
||||||
Chain.spec = chain_spec
|
Chain.spec = chain_spec
|
||||||
|
|
||||||
|
|
||||||
|
20
apps/cic-ussd/tests/fixtures/metadata.py
vendored
20
apps/cic-ussd/tests/fixtures/metadata.py
vendored
@ -6,33 +6,19 @@ import tempfile
|
|||||||
# external imports
|
# external imports
|
||||||
import pytest
|
import pytest
|
||||||
from chainlib.hash import strip_0x
|
from chainlib.hash import strip_0x
|
||||||
|
from cic_types.condiments import MetadataPointer
|
||||||
from cic_types.processor import generate_metadata_pointer
|
from cic_types.processor import generate_metadata_pointer
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.metadata import Metadata, PersonMetadata, PhonePointerMetadata, PreferencesMetadata
|
from cic_ussd.metadata import PersonMetadata, PhonePointerMetadata, PreferencesMetadata
|
||||||
from cic_ussd.metadata.signer import Signer
|
|
||||||
|
|
||||||
logg = logging.getLogger(__name__)
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='function')
|
|
||||||
def setup_metadata_signer(load_config):
|
|
||||||
temp_dir = tempfile.mkdtemp(dir='/tmp')
|
|
||||||
logg.debug(f'Created temp dir: {temp_dir}')
|
|
||||||
Signer.gpg_path = temp_dir
|
|
||||||
Signer.gpg_passphrase = load_config.get('PGP_PASSPHRASE')
|
|
||||||
Signer.key_file_path = os.path.join(load_config.get('PGP_KEYS_PATH'), load_config.get('PGP_PRIVATE_KEYS'))
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='function')
|
|
||||||
def setup_metadata_request_handler(load_config):
|
|
||||||
Metadata.base_url = load_config.get('CIC_META_URL')
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='function')
|
@pytest.fixture(scope='function')
|
||||||
def account_phone_pointer(activated_account):
|
def account_phone_pointer(activated_account):
|
||||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
||||||
return generate_metadata_pointer(identifier, ':cic.phone')
|
return generate_metadata_pointer(identifier, MetadataPointer.PHONE)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='function')
|
@pytest.fixture(scope='function')
|
||||||
|
26
apps/contract-migration/1_deploy_contract_root.sh
Normal file
26
apps/contract-migration/1_deploy_contract_root.sh
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
. util.sh
|
||||||
|
|
||||||
|
set -a
|
||||||
|
|
||||||
|
. ${DEV_DATA_DIR}/env_reset
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
|
||||||
|
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
|
||||||
|
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
must_eth_rpc
|
||||||
|
|
||||||
|
# Deploy address declarator registry
|
||||||
|
>&2 echo -e "\033[;96mDeploy address declarator contract\033[;39m"
|
||||||
|
DEV_ADDRESS_DECLARATOR=`eth-address-declarator-deploy -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG $DEV_DECLARATOR_DESCRIPTION`
|
||||||
|
|
||||||
|
echo -e "\033[;96mWriting env_reset file\033[;39m"
|
||||||
|
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset
|
||||||
|
|
||||||
|
set +a
|
||||||
|
set +e
|
64
apps/contract-migration/2_deploy_contract_instance.sh
Normal file
64
apps/contract-migration/2_deploy_contract_instance.sh
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
. util.sh
|
||||||
|
|
||||||
|
set -a
|
||||||
|
|
||||||
|
. ${DEV_DATA_DIR}/env_reset
|
||||||
|
|
||||||
|
WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
must_address "$DEV_ADDRESS_DECLARATOR" "address declarator"
|
||||||
|
must_eth_rpc
|
||||||
|
|
||||||
|
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
|
||||||
|
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
|
||||||
|
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
# Deploy contract registry contract
|
||||||
|
>&2 echo -e "\033[;96mDeploy contract registry contract\033[;39m"
|
||||||
|
CIC_REGISTRY_ADDRESS=`okota-contract-registry-deploy $fee_price_arg -i $CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry --identifier DefaultToken --address-declarator $DEV_ADDRESS_DECLARATOR -p $RPC_PROVIDER $DEV_DEBUG_FLAG -s -u -w`
|
||||||
|
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mAdd contract registry record to itself\033[;39m"
|
||||||
|
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier ContractRegistry $CIC_REGISTRY_ADDRESS`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mAdd address declarator record to contract registry\033[;39m"
|
||||||
|
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier AddressDeclarator $DEV_ADDRESS_DECLARATOR`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
|
||||||
|
|
||||||
|
# Deploy transfer authorization contact
|
||||||
|
>&2 echo -e "\033[;96mDeploy transfer authorization contract\033[;39m"
|
||||||
|
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy $gas_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG`
|
||||||
|
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mAdd transfer authorization record to contract registry\033[;39m"
|
||||||
|
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
|
||||||
|
|
||||||
|
# Deploy token index contract
|
||||||
|
>&2 echo -e "\033[;96mDeploy token symbol index contract\033[;39m"
|
||||||
|
DEV_TOKEN_INDEX_ADDRESS=`okota-token-index-deploy -s -u $fee_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG --address-declarator $DEV_ADDRESS_DECLARATOR`
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mAdd token symbol index record to contract registry\033[;39m"
|
||||||
|
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
|
||||||
|
#>&2 echo "add reserve token to token index"
|
||||||
|
#eth-token-index-add $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG -e $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS
|
||||||
|
|
||||||
|
|
||||||
|
echo -e "\033[;96mWriting env_reset file\033[;39m"
|
||||||
|
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset
|
||||||
|
|
||||||
|
|
||||||
|
set +a
|
||||||
|
set +e
|
136
apps/contract-migration/3_deploy_token.sh
Normal file
136
apps/contract-migration/3_deploy_token.sh
Normal file
@ -0,0 +1,136 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
. util.sh
|
||||||
|
|
||||||
|
set -a
|
||||||
|
|
||||||
|
. ${DEV_DATA_DIR}/env_reset
|
||||||
|
|
||||||
|
WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
|
||||||
|
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
|
||||||
|
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
have_default_token=1
|
||||||
|
token_feedback_display_string='token'
|
||||||
|
|
||||||
|
must_address "$DEV_ADDRESS_DECLARATOR" "address declarator"
|
||||||
|
must_address "$CIC_REGISTRY_ADDRESS" "registry"
|
||||||
|
must_eth_rpc
|
||||||
|
|
||||||
|
|
||||||
|
function _deploy_token_defaults {
|
||||||
|
if [ -z "$TOKEN_SYMBOL" ]; then
|
||||||
|
>&2 echo -e "\033[;33mtoken symbol not set, setting defaults for type $TOKEN_TYPE\033[;39m"
|
||||||
|
TOKEN_SYMBOL=$1
|
||||||
|
TOKEN_NAME=$2
|
||||||
|
elif [ -z "$TOKEN_NAME" ]; then
|
||||||
|
>&2 echo -e "\033[;33mtoken name not set, setting same as symbol for type $TOKEN_TYPE\033[;39m"
|
||||||
|
TOKEN_NAME=$TOKEN_SYMBOL
|
||||||
|
fi
|
||||||
|
TOKEN_DECIMALS=${TOKEN_DECIMALS:-6}
|
||||||
|
|
||||||
|
default_token_registered=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw DefaultToken`
|
||||||
|
if [ $default_token_registered == '0000000000000000000000000000000000000000' ]; then
|
||||||
|
>&2 echo -e "\033[;33mFound no existing default token in token registry\033[;39m"
|
||||||
|
have_default_token=''
|
||||||
|
token_feedback_display_string='default token'
|
||||||
|
fi
|
||||||
|
>&2 echo -e "\033[;96mdeploying $token_feedback_display_string ..."
|
||||||
|
>&2 echo -e "Type: $TOKEN_TYPE"
|
||||||
|
>&2 echo -e "Name: $TOKEN_NAME"
|
||||||
|
>&2 echo -e "Symbol: $TOKEN_SYMBOL"
|
||||||
|
>&2 echo -e "Decimals: $TOKEN_DECIMALS\033[;39m"
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
function deploy_token_giftable_erc20_token() {
|
||||||
|
_deploy_token_defaults "GFT" "Giftable Token"
|
||||||
|
TOKEN_ADDRESS=`giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals $TOKEN_DECIMALS $DEV_DEBUG_FLAG`
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function deploy_token_erc20_demurrage_token() {
|
||||||
|
_deploy_token_defaults "DET" "Demurrage Token"
|
||||||
|
TOKEN_ADDRESS=`erc20-demurrage-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL $DEV_DEBUG_FLAG -ww -s`
|
||||||
|
}
|
||||||
|
|
||||||
|
function deploy_accounts_index() {
|
||||||
|
# Deploy accounts index contract
|
||||||
|
>&2 echo -e "\033[;96mDeploy accounts index contract for token $TOKEN_SYMBOL\033[;39m"
|
||||||
|
DEV_ACCOUNTS_INDEX_ADDRESS=`okota-accounts-index-deploy $gas_price_arg -u -s -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --address-declarator $DEV_ADDRESS_DECLARATOR --token-address $1`
|
||||||
|
|
||||||
|
if [ -z "$have_default_token" ]; then
|
||||||
|
>&2 echo -e "\033[;96mAdd accounts index record for default token to contract registry\033[;39m"
|
||||||
|
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier AccountRegistry $DEV_ACCOUNTS_INDEX_ADDRESS`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function deploy_minter_faucet() {
|
||||||
|
FAUCET_AMOUNT=${FAUCET_AMOUNT:-0}
|
||||||
|
|
||||||
|
# Token faucet contract
|
||||||
|
>&2 echo -e "\033[;96mDeploy token faucet contract for token $TOKEN_SYMBOL\033[;39m"
|
||||||
|
accounts_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw AccountRegistry`
|
||||||
|
faucet_address=`sarafu-faucet-deploy $fee_price_arg -s -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG --account-index-address $accounts_index_address $1`
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mSet token faucet amount to $FAUCET_AMOUNT\033[;39m"
|
||||||
|
r=`sarafu-faucet-set $fee_price_arg -s -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $faucet_address $DEV_DEBUG_FLAG -s --fee-limit 100000 $FAUCET_AMOUNT`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
|
||||||
|
if [ -z $have_default_token ]; then
|
||||||
|
>&2 echo -e "\033[;96mRegister faucet in registry\033[;39m"
|
||||||
|
r=`eth-contract-registry-set -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier Faucet $faucet_address`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
fi
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mSet faucet as token minter\033[;39m"
|
||||||
|
r=`giftable-token-minter -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $TOKEN_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG $faucet_address`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
TOKEN_TYPE=${TOKEN_TYPE:-giftable_erc20_token}
|
||||||
|
deploy_token_${TOKEN_TYPE}
|
||||||
|
|
||||||
|
if [ -z "$have_default_token" ]; then
|
||||||
|
>&2 echo -e "\033[;96mAdd default token to contract registry\033[;39m"
|
||||||
|
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier DefaultToken $TOKEN_ADDRESS`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mAdd token symbol $TOKEN_SYMBOL to token address $TOKEN_ADDRESS mapping to token index\033[;39m"
|
||||||
|
token_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw TokenRegistry`
|
||||||
|
r=`eth-token-index-add $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG -e $token_index_address $TOKEN_ADDRESS`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
|
||||||
|
|
||||||
|
TOKEN_MINT_AMOUNT=${TOKEN_MINT_AMOUNT:-${DEV_TOKEN_MINT_AMOUNT}}
|
||||||
|
>&2 echo -e "\033[;96mMinting $TOKEN_MINT_AMOUNT tokens\033[;39m"
|
||||||
|
r=`giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC -u $DEV_DEBUG_FLAG -s -w -e $TOKEN_ADDRESS "$TOKEN_MINT_AMOUNT"`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
|
||||||
|
|
||||||
|
# Create accounts index for default token
|
||||||
|
deploy_accounts_index $TOKEN_ADDRESS
|
||||||
|
|
||||||
|
# Connect a minter component if defined
|
||||||
|
TOKEN_MINTER_MODE=${TOKEN_MINTER_MODE:-"faucet"}
|
||||||
|
if [ -z "$TOKEN_MINTER_MODE" ]; then
|
||||||
|
>&2 echo -e "\033[;33mNo token minter mode set.\033[;39m"
|
||||||
|
else
|
||||||
|
deploy_minter_${TOKEN_MINTER_MODE} $TOKEN_ADDRESS
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mWriting env_reset file\033[;39m"
|
||||||
|
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset
|
||||||
|
|
||||||
|
set +e
|
||||||
|
set +a
|
67
apps/contract-migration/4_init_custodial.sh
Normal file
67
apps/contract-migration/4_init_custodial.sh
Normal file
@ -0,0 +1,67 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
. util.sh
|
||||||
|
|
||||||
|
set -a
|
||||||
|
|
||||||
|
. ${DEV_DATA_DIR}/env_reset
|
||||||
|
|
||||||
|
WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
|
||||||
|
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
|
||||||
|
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
must_address "$CIC_REGISTRY_ADDRESS" "registry"
|
||||||
|
must_eth_rpc
|
||||||
|
|
||||||
|
# get required addresses from registries
|
||||||
|
token_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw TokenRegistry`
|
||||||
|
accounts_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw AccountRegistry`
|
||||||
|
reserve_address=`eth-token-index-list -i $CHAIN_SPEC -u -p $RPC_PROVIDER -e $token_index_address $DEV_DEBUG_FLAG --raw $CIC_DEFAULT_TOKEN_SYMBOL`
|
||||||
|
|
||||||
|
|
||||||
|
REDIS_HOST_CALLBACK=${REDIS_HOST_CALLBACK:-$REDIS_HOST}
|
||||||
|
REDIS_PORT_CALLBACK=${REDIS_PORT_CALLBACK:-$REDIS_PORT}
|
||||||
|
>&2 echo -e "\033[;96mcreate account for gas gifter\033[;39m"
|
||||||
|
gas_gifter=`cic-eth-create --redis-timeout 120 $DEV_DEBUG_FLAG --redis-host-callback $REDIS_HOST_CALLBACK --redis-port-callback $REDIS_PORT_CALLBACK --no-register`
|
||||||
|
cic-eth-tag -i $CHAIN_SPEC GAS_GIFTER $gas_gifter
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mcreate account for accounts index writer\033[;39m"
|
||||||
|
accounts_index_writer=`cic-eth-create --redis-timeout 120 $DEV_DEBUG_FLAG --redis-host-callback $REDIS_HOST_CALLBACK --redis-port-callback $REDIS_PORT_CALLBACK --no-register`
|
||||||
|
cic-eth-tag -i $CHAIN_SPEC ACCOUNT_REGISTRY_WRITER $accounts_index_writer
|
||||||
|
|
||||||
|
|
||||||
|
# Assign system writer for accounts index
|
||||||
|
>&2 echo -e "\033[;96mEnable accounts index writer $accounts_index_writer to write to accounts index contract at $accounts_index_address\033[;39m"
|
||||||
|
r=`eth-accounts-index-writer -s -w -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $accounts_index_address $DEV_DEBUG_FLAG $accounts_index_writer`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
|
||||||
|
|
||||||
|
# Transfer gas to custodial gas provider address
|
||||||
|
>&2 echo -e "\033[;96mGift gas to gas gifter $gas_gifter\033[;39m"
|
||||||
|
echo "eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG -a $gas_gifter $DEV_GAS_AMOUNT"
|
||||||
|
r=`eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG -a $gas_gifter $DEV_GAS_AMOUNT`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mgift gas to accounts index owner $accounts_index_writer\033[;39m"
|
||||||
|
# for now we are using the same key for both
|
||||||
|
DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
|
||||||
|
r=`eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG -a $accounts_index_writer $DEV_GAS_AMOUNT`
|
||||||
|
add_pending_tx_hash $r
|
||||||
|
|
||||||
|
|
||||||
|
# Remove the SEND (8), QUEUE (16) and INIT (2) locks (or'ed), set by default at migration
|
||||||
|
cic-eth-ctl -vv -i $CHAIN_SPEC unlock INIT
|
||||||
|
cic-eth-ctl -vv -i $CHAIN_SPEC unlock SEND
|
||||||
|
cic-eth-ctl -vv -i $CHAIN_SPEC unlock QUEUE
|
||||||
|
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mWriting env_reset file\033[;39m"
|
||||||
|
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset
|
||||||
|
|
||||||
|
set +e
|
||||||
|
set +a
|
27
apps/contract-migration/5_data_seeding.sh
Normal file
27
apps/contract-migration/5_data_seeding.sh
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
. util.sh
|
||||||
|
|
||||||
|
set -a
|
||||||
|
|
||||||
|
. ${DEV_DATA_DIR}/env_reset
|
||||||
|
|
||||||
|
WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
|
||||||
|
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
|
||||||
|
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
must_address "$CIC_REGISTRY_ADDRESS" "registry"
|
||||||
|
must_eth_rpc
|
||||||
|
|
||||||
|
|
||||||
|
accounts_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw AccountRegistry`
|
||||||
|
|
||||||
|
|
||||||
|
>&2 echo -e "\033[;96mEnable default wallet $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER to write to accounts index contract at $accounts_index_address\033[;39m"
|
||||||
|
r=`eth-accounts-index-writer -s -w -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $accounts_index_address $DEV_DEBUG_FLAG $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER`
|
||||||
|
add_pending_tx_hash $r
|
@ -1,53 +1,44 @@
|
|||||||
# Contract Migration
|
# CIC-stack system bootstrap scripts
|
||||||
|
|
||||||
Common docker artifacts and bootstrap scripts
|
|
||||||
|
|
||||||
## How this repo works
|
|
||||||
|
|
||||||
This repo builds contracts and deploys them to a chain
|
## 1. Deploy global contracts.
|
||||||
|
|
||||||
First, bring up an eth evm provider
|
Global contracts are contracts that may contribute to a data store intended for consumption across multiple instances.
|
||||||
```
|
|
||||||
docker-compose up eth
|
|
||||||
```
|
|
||||||
|
|
||||||
Now build this repo's image and run it against the 'eth' service (ganache, for example). You will need to bind to the docker-compose network (cic-network) and mount the special contract output folder that dependent services use to get deployed contract addresses.
|
In the current version of the scripts, the only contract deployed is the `AddressDeclarator`. Also, in the current version, the `AddressDeclarator` is required as a storage backend for some of the instance contracts.
|
||||||
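A minimal invocation sketch for this step, assuming the runlevel scripts added in this commit are run directly after the configuration step has created `${DEV_DATA_DIR}/env_reset`; the key file path, chain spec and RPC endpoint below are hypothetical placeholders.

```
# Runlevel 1: deploy the AddressDeclarator (the only global contract at present).
export DEV_DATA_DIR=/tmp/cic-bootstrap           # scratch dir holding env_reset
export WALLET_KEY_FILE=/root/keyfile.json        # hypothetical keystore path
export CHAIN_SPEC=evm:byzantium:8996:bloxberg    # hypothetical chain spec
export RPC_PROVIDER=http://localhost:8545        # hypothetical RPC endpoint
bash 1_deploy_contract_root.sh                   # exports DEV_ADDRESS_DECLARATOR and rewrites env_reset
```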
|
|
||||||
Here is how to do that in one shot:
|
|
||||||
```
|
|
||||||
docker build -t registry.gitlab.com/grassrootseconomics/cic-docker-internal -f docker/ . && docker run --env ETH_PROVIDER=http://eth:8545 --net cic-network -v cic-docker-internal_contract-config:/tmp/cic/config --rm -it registry.gitlab.com/grassrootseconomics/cic-docker-internal reset.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
Stop the containers and bring down the services with
|
## 2. Deploy instance contracts.
|
||||||
```
|
|
||||||
docker-compose down
|
|
||||||
```
|
|
||||||
|
|
||||||
If you want a fresh start to the dev environment then bring down the services and delete their associated volumes with
|
Instance contracts are contracts whose contents are limited to the context of a single custodial engine system.
|
||||||
|
|
||||||
```
|
This includes a registry of contracts used by the engine, as well as registry contracts for user accounts and tokens.
|
||||||
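A sketch of the hand-off between runlevels 1 and 2, under the same assumptions as above; the contract identifiers named in the comments are taken from `2_deploy_contract_instance.sh` in this commit.

```
# Runlevel 2: deploy the instance contracts against the declarator from runlevel 1.
. ${DEV_DATA_DIR}/env_reset          # brings in DEV_ADDRESS_DECLARATOR
bash 2_deploy_contract_instance.sh   # ContractRegistry, TransferAuthorization, TokenRegistry
. ${DEV_DATA_DIR}/env_reset          # now also exposes CIC_REGISTRY_ADDRESS
```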
docker-compose down -v
|
|
||||||
```
|
|
||||||
|
|
||||||
A goal is to go through all of these containers and create a default non-root user a la:
|
|
||||||
https://vsupalov.com/docker-shared-permissions/
|
|
||||||
|
|
||||||
## Tips and Tricks
|
## 3. Deploy token.
|
||||||
|
|
||||||
Sometimes you just want to hold a container open in docker compose so you can exec into it and poke around. Replace "command" with
|
Deploys a CIC token, adding it to the token registry.
|
||||||
|
|
||||||
```
|
The first token deployed becomes the default token of the instance.
|
||||||
command:
|
|
||||||
- /bin/sh
|
|
||||||
- -c
|
|
||||||
- |
|
|
||||||
tail -f /dev/null
|
|
||||||
```
|
|
||||||
then
|
|
||||||
|
|
||||||
```
|
In the current version of the scripts, two token types may be deployed; [`giftable_erc20_token`](https://gitlab.com/cicnet/eth-erc20) and [`erc20_demurrage_token`](https://gitlab.com/cicnet/erc20-demurrage-token).
|
||||||
docker exec -it [IMAGE_NAME] sh
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
This step may be run multiple times, as long as the token symbol is different from all previously deployed tokens.
|
||||||
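A sketch of deploying an additional (non-default) token, using the variable names from `3_deploy_token.sh` in this commit; the symbol and name below are illustrative only.

```
# Deploy a second token; the symbol must differ from every previously deployed one.
export TOKEN_TYPE=erc20_demurrage_token
export TOKEN_SYMBOL=DET2                      # hypothetical symbol
export TOKEN_NAME="Second Demurrage Token"    # hypothetical name
export TOKEN_DECIMALS=6
bash 3_deploy_token.sh
```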
|
|
||||||
|
|
||||||
|
## 4. Initialize custodial engine.
|
||||||
|
|
||||||
|
Adds system accounts to the custodial engine, and unlocks the initialization seal. After this step, the custodial system is ready to use.
|
||||||
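A sketch of the final runlevel, assuming redis, postgres and cic-eth-tasker are already reachable; the host and port values are placeholders.

```
# Runlevel 4: create system accounts and lift the bootstrap locks.
export REDIS_HOST=localhost    # placeholder
export REDIS_PORT=6379         # placeholder
bash 4_init_custodial.sh       # tags GAS_GIFTER and ACCOUNT_REGISTRY_WRITER, then unlocks INIT, SEND and QUEUE
```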
|
|
||||||
|
|
||||||
|
## Services dependency graph
|
||||||
|
|
||||||
|
1. evm
|
||||||
|
2. bootstrap runlevel 1
|
||||||
|
3. bootstrap runlevel 2
|
||||||
|
4. bootstrap runlevel 3
|
||||||
|
5. redis
|
||||||
|
6. postgres
|
||||||
|
7. cic-eth-tasker
|
||||||
|
8. bootstrap runlevel 4
|
||||||
|
53
apps/contract-migration/README_bloxberg.md
Normal file
53
apps/contract-migration/README_bloxberg.md
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
# Contract Migration
|
||||||
|
|
||||||
|
Common docker artifacts and bootstrap scripts
|
||||||
|
|
||||||
|
## How this repo works
|
||||||
|
|
||||||
|
This repo builds contracts and deploys them to a chain
|
||||||
|
|
||||||
|
First, bring up an eth evm provider
|
||||||
|
```
|
||||||
|
docker-compose up eth
|
||||||
|
```
|
||||||
|
|
||||||
|
Now build this repo's image and run it against the 'eth' service (ganache, for example). You will need to bind to the docker-compose network (cic-network) and mount the special contract output folder that dependent services use to get deployed contract addresses.
|
||||||
|
|
||||||
|
Here is how to do that in one shot:
|
||||||
|
```
|
||||||
|
docker build -t registry.gitlab.com/grassrootseconomics/cic-docker-internal -f docker/ . && docker run --env ETH_PROVIDER=http://eth:8545 --net cic-network -v cic-docker-internal_contract-config:/tmp/cic/config --rm -it registry.gitlab.com/grassrootseconomics/cic-docker-internal reset.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
Stop the containers and bring down the services with
|
||||||
|
```
|
||||||
|
docker-compose down
|
||||||
|
```
|
||||||
|
|
||||||
|
If you want a fresh start to the dev environment then bring down the services and delete their associated volumes with
|
||||||
|
|
||||||
|
```
|
||||||
|
docker-compose down -v
|
||||||
|
```
|
||||||
|
|
||||||
|
A goal is to go through all of these containers and create a default non-root user a la:
|
||||||
|
https://vsupalov.com/docker-shared-permissions/
|
||||||
|
|
||||||
|
## Tips and Tricks
|
||||||
|
|
||||||
|
Sometimes you just want to hold a container open in docker compose so you can exec into it and poke around. Replace "command" with
|
||||||
|
|
||||||
|
```
|
||||||
|
command:
|
||||||
|
- /bin/sh
|
||||||
|
- -c
|
||||||
|
- |
|
||||||
|
tail -f /dev/null
|
||||||
|
```
|
||||||
|
then
|
||||||
|
|
||||||
|
```
|
||||||
|
docker exec -it [IMAGE_NAME] sh
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
@ -8,11 +8,16 @@ else
|
|||||||
mkdir -p $DEV_DATA_DIR
|
mkdir -p $DEV_DATA_DIR
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# By default configuration values generated from previous runs will be used in subsequent invocations
|
||||||
|
# Set DEV_CONFIG_RESET to discard them and generate a fresh configuration
|
||||||
if [ -z $DEV_CONFIG_RESET ]; then
|
if [ -z $DEV_CONFIG_RESET ]; then
|
||||||
if [ -f ${DEV_DATA_DIR}/env_reset ]; then
|
if [ -f $DEV_DATA_DIR/env_reset ]; then
|
||||||
>&2 echo "importing existing configuration values from ${DEV_DATA_DIR}/env_reset"
|
>&2 echo -e "\033[;96mimporting existing configuration values from ${DEV_DATA_DIR}/env_reset\033[;39m"
|
||||||
. ${DEV_DATA_DIR}/env_reset
|
. ${DEV_DATA_DIR}/env_reset
|
||||||
fi
|
fi
|
||||||
|
else
|
||||||
|
>&2 echo -e "\033[;33mGenerating scratch configuration\033[;39m"
|
||||||
|
confini-dump --schema-dir ./config --prefix export > ${DEV_DATA_DIR}/env_reset
|
||||||
fi
|
fi
|
||||||
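A usage sketch for the reset switch introduced above; the enclosing script's name is not visible in this hunk, so the invocation below is illustrative only.

```
./bootstrap_config.sh                    # hypothetical name: reuse values from a previous run (default)
DEV_CONFIG_RESET=1 ./bootstrap_config.sh # regenerate ${DEV_DATA_DIR}/env_reset from scratch
```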
|
|
||||||
# Handle wallet
|
# Handle wallet
|
||||||
@ -21,7 +26,8 @@ if [ ! -f $WALLET_KEY_FILE ]; then
|
|||||||
>&2 echo "wallet path '$WALLET_KEY_FILE' does not point to a file"
|
>&2 echo "wallet path '$WALLET_KEY_FILE' does not point to a file"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-checksum $(cat $WALLET_KEY_FILE | jq -r .address)`
|
#export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-checksum $(cat $WALLET_KEY_FILE | jq -r .address)`
|
||||||
|
export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-keyfile -z -d $WALLET_KEY_FILE`
|
||||||
|
|
||||||
# Wallet dependent variable defaults
|
# Wallet dependent variable defaults
|
||||||
export DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
|
export DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
|
||||||
@ -31,16 +37,7 @@ export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL
|
|||||||
export TOKEN_SINK_ADDRESS=${TOKEN_SINK_ADDRESS:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
|
export TOKEN_SINK_ADDRESS=${TOKEN_SINK_ADDRESS:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
|
||||||
|
|
||||||
|
|
||||||
# Legacy variable defaults
|
|
||||||
|
|
||||||
|
|
||||||
# Migration variable processing
|
# Migration variable processing
|
||||||
confini-dump --schema-dir ./config --prefix export > ${DEV_DATA_DIR}/env_reset
|
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset
|
||||||
echo "export CIC_TRUST_ADDRESS=$CIC_TRUST_ADDRESS
|
|
||||||
export CIC_DEFAULT_TOKEN_SYMBOL=$CIC_DEFAULT_TOKEN_SYMBOL
|
|
||||||
export WALLET_KEY_FILE=$WALLET_KEY_FILE
|
|
||||||
" >> ${DEV_DATA_DIR}/env_reset
|
|
||||||
|
|
||||||
cat ${DEV_DATA_DIR}/env_reset
|
|
||||||
|
|
||||||
set +a
|
set +a
|
||||||
|
@ -1,13 +1,26 @@
|
|||||||
[dev]
|
[dev]
|
||||||
eth_account_contract_deployer =
|
eth_account_contract_deployer =
|
||||||
eth_account_reserve_minter =
|
token_mint_amount = 10000000000000000000000000000000000
|
||||||
eth_account_accounts_index_writer =
|
|
||||||
reserve_amount = 10000000000000000000000000000000000
|
|
||||||
faucet_amount = 0
|
|
||||||
gas_amount = 100000000000000000000000
|
gas_amount = 100000000000000000000000
|
||||||
token_amount = 100000000000000000000000
|
|
||||||
eth_gas_price =
|
eth_gas_price =
|
||||||
data_dir =
|
data_dir =
|
||||||
pip_extra_index_url =
|
address_declarator =
|
||||||
eth_provider_host =
|
declarator_description = 0x546869732069732074686520434943206e6574776f726b000000000000000000
|
||||||
eth_provider_port =
|
|
||||||
|
[chain]
|
||||||
|
spec =
|
||||||
|
|
||||||
|
[rpc]
|
||||||
|
provider =
|
||||||
|
|
||||||
|
[celery]
|
||||||
|
broker_url =
|
||||||
|
result_url =
|
||||||
|
|
||||||
|
[redis]
|
||||||
|
host =
|
||||||
|
port =
|
||||||
|
|
||||||
|
[cic]
|
||||||
|
registry_address =
|
||||||
|
trust_address =
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
# syntax = docker/dockerfile:1.2
|
ARG DEV_DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
|
||||||
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e
|
|
||||||
|
FROM $DEV_DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e
|
||||||
|
|
||||||
WORKDIR /root
|
WORKDIR /root
|
||||||
|
|
||||||
@ -9,8 +10,6 @@ RUN echo 'deb-src http://ppa.launchpad.net/ethereum/ethereum/ubuntu bionic main'
|
|||||||
RUN cat /etc/apt/sources.list.d/ethereum.list
|
RUN cat /etc/apt/sources.list.d/ethereum.list
|
||||||
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 2A518C819BE37D2C2031944D1C52189C923F6CA9
|
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 2A518C819BE37D2C2031944D1C52189C923F6CA9
|
||||||
|
|
||||||
#RUN apt-get install solc
|
|
||||||
|
|
||||||
RUN mkdir -vp /usr/local/etc/cic
|
RUN mkdir -vp /usr/local/etc/cic
|
||||||
|
|
||||||
ENV CONFINI_DIR /usr/local/etc/cic/
|
ENV CONFINI_DIR /usr/local/etc/cic/
|
||||||
@ -18,29 +17,20 @@ ENV CONFINI_DIR /usr/local/etc/cic/
|
|||||||
|
|
||||||
COPY config_template/ /usr/local/etc/cic/
|
COPY config_template/ /usr/local/etc/cic/
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
COPY override_requirements.txt .
|
|
||||||
|
RUN apt-get install libffi-dev
|
||||||
|
|
||||||
ARG pip_index_url=https://pypi.org/simple
|
ARG pip_index_url=https://pypi.org/simple
|
||||||
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||||
ARG EXTRA_PIP_ARGS=""
|
ARG EXTRA_PIP_ARGS=""
|
||||||
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
ARG PIP_INDEX_URL="https://pypi.org/simple"
|
||||||
ARG pip_trusted_host=pypi.org
|
ARG pip_trusted_host=pypi.org
|
||||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
RUN pip install --index-url $PIP_INDEX_URL \
|
||||||
pip install --index-url https://pypi.org/simple \
|
|
||||||
--pre \
|
--pre \
|
||||||
--force-reinstall \
|
--force-reinstall \
|
||||||
--trusted-host $pip_trusted_host \
|
--no-cache \
|
||||||
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
|
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||||
-r requirements.txt
|
-r requirements.txt
|
||||||
|
|
||||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
|
||||||
pip install --index-url https://pypi.org/simple \
|
|
||||||
--force-reinstall \
|
|
||||||
--pre \
|
|
||||||
--trusted-host $pip_trusted_host \
|
|
||||||
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
|
|
||||||
-r override_requirements.txt
|
|
||||||
|
|
||||||
|
|
||||||
COPY . .
|
COPY . .
|
||||||
RUN chmod +x *.sh
|
RUN chmod +x *.sh
|
||||||
|
@ -1,62 +0,0 @@
|
|||||||
SYNCER_LOOP_INTERVAL
|
|
||||||
SSL_ENABLE_CLIENT
|
|
||||||
SSL_CERT_FILE
|
|
||||||
SSL_KEY_FILE
|
|
||||||
SSL_PASSWORD
|
|
||||||
SSL_CA_FILE
|
|
||||||
BANCOR_DIR
|
|
||||||
REDIS_HOST
|
|
||||||
REDIS_PORT
|
|
||||||
REDIS_DB
|
|
||||||
PGP_EXPORTS_DIR
|
|
||||||
PGP_PRIVATEKEY_FILE
|
|
||||||
PGP_PASSPHRASE
|
|
||||||
DATABASE_USER
|
|
||||||
DATABASE_PASSWORD
|
|
||||||
DATABASE_NAME
|
|
||||||
DATABASE_HOST
|
|
||||||
DATABASE_PORT
|
|
||||||
DATABASE_ENGINE
|
|
||||||
DATABASE_DRIVER
|
|
||||||
DATABASE_DEBUG
|
|
||||||
TASKS_AFRICASTALKING
|
|
||||||
TASKS_SMS_DB
|
|
||||||
TASKS_LOG
|
|
||||||
TASKS_TRACE_QUEUE_STATUS
|
|
||||||
TASKS_TRANSFER_CALLBACKS
|
|
||||||
DEV_MNEMONIC
|
|
||||||
DEV_ETH_RESERVE_ADDRESS
|
|
||||||
DEV_ETH_ACCOUNTS_INDEX_ADDRESS
|
|
||||||
DEV_ETH_RESERVE_AMOUNT
|
|
||||||
DEV_ETH_ACCOUNT_BANCOR_DEPLOYER
|
|
||||||
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
|
|
||||||
DEV_ETH_ACCOUNT_GAS_PROVIDER
|
|
||||||
DEV_ETH_ACCOUNT_RESERVE_OWNER
|
|
||||||
DEV_ETH_ACCOUNT_RESERVE_MINTER
|
|
||||||
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_OWNER
|
|
||||||
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER
|
|
||||||
DEV_ETH_ACCOUNT_SARAFU_OWNER
|
|
||||||
DEV_ETH_ACCOUNT_SARAFU_GIFTER
|
|
||||||
DEV_ETH_ACCOUNT_APPROVAL_ESCROW_OWNER
|
|
||||||
DEV_ETH_ACCOUNT_SINGLE_SHOT_FAUCET_OWNER
|
|
||||||
DEV_ETH_SARAFU_TOKEN_NAME
|
|
||||||
DEV_ETH_SARAFU_TOKEN_SYMBOL
|
|
||||||
DEV_ETH_SARAFU_TOKEN_DECIMALS
|
|
||||||
DEV_ETH_SARAFU_TOKEN_ADDRESS
|
|
||||||
DEV_PGP_PUBLICKEYS_ACTIVE_FILE
|
|
||||||
DEV_PGP_PUBLICKEYS_TRUSTED_FILE
|
|
||||||
DEV_PGP_PUBLICKEYS_ENCRYPT_FILE
|
|
||||||
CIC_REGISTRY_ADDRESS
|
|
||||||
CIC_APPROVAL_ESCROW_ADDRESS
|
|
||||||
CIC_TOKEN_INDEX_ADDRESS
|
|
||||||
CIC_ACCOUNTS_INDEX_ADDRESS
|
|
||||||
CIC_DECLARATOR_ADDRESS
|
|
||||||
CIC_CHAIN_SPEC
|
|
||||||
ETH_PROVIDER
|
|
||||||
ETH_ABI_DIR
|
|
||||||
SIGNER_SOCKET_PATH
|
|
||||||
SIGNER_SECRET
|
|
||||||
SIGNER_PROVIDER
|
|
||||||
CELERY_BROKER_URL
|
|
||||||
CELERY_RESULT_URL
|
|
||||||
META_PROVIDER
|
|
@ -1,4 +0,0 @@
|
|||||||
#eth-contract-registry==0.6.3a2
|
|
||||||
#erc20-demurrage-token==0.0.2a3
|
|
||||||
#eth-address-index==0.1.1a12
|
|
||||||
|
|
@ -1,15 +1,14 @@
|
|||||||
cic-eth[tools]==0.12.4a8
|
cic-eth[tools]==0.12.4a13
|
||||||
chainlib-eth>=0.0.9rc1,<0.1.0
|
chainlib-eth>=0.0.10a5,<0.1.0
|
||||||
chainlib==0.0.9rc1,<0.1.0
|
chainlib==0.0.10a3,<0.1.0
|
||||||
eth-erc20>=0.1.2a3,<0.2.0
|
eth-erc20>=0.1.2a3,<0.2.0
|
||||||
erc20-demurrage-token>=0.0.5a2,<0.1.0
|
erc20-demurrage-token>=0.0.5a2,<0.1.0
|
||||||
#eth-accounts-index>=0.1.2a2,<0.2.0
|
|
||||||
eth-address-index>=0.2.4a1,<0.3.0
|
eth-address-index>=0.2.4a1,<0.3.0
|
||||||
cic-eth-registry>=0.6.1a2,<0.7.0
|
cic-eth-registry>=0.6.1a6,<0.7.0
|
||||||
erc20-transfer-authorization>=0.3.5a2,<0.4.0
|
erc20-transfer-authorization>=0.3.5a2,<0.4.0
|
||||||
erc20-faucet>=0.3.2a2,<0.4.0
|
erc20-faucet>=0.3.2a2,<0.4.0
|
||||||
sarafu-faucet>=0.0.7a2,<0.1.0
|
sarafu-faucet>=0.0.7a2,<0.1.0
|
||||||
confini>=0.4.2rc3,<1.0.0
|
confini>=0.4.2rc3,<1.0.0
|
||||||
crypto-dev-signer>=0.4.15rc2,<=0.4.15
|
crypto-dev-signer>=0.4.15rc2,<=0.4.15
|
||||||
eth-token-index>=0.2.4a1,<=0.3.0
|
eth-token-index>=0.2.4a1,<=0.3.0
|
||||||
okota>=0.2.4a5,<0.3.0
|
okota>=0.2.4a15,<0.3.0
|
||||||
|
@ -1,127 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
set -a
|
|
||||||
|
|
||||||
. ${DEV_DATA_DIR}/env_reset
|
|
||||||
|
|
||||||
WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
|
|
||||||
|
|
||||||
set -e
|
|
||||||
|
|
||||||
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
|
|
||||||
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
|
|
||||||
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Wait for the backend to be up, if we know where it is.
|
|
||||||
if [ -z "${RPC_PROVIDER}" ]; then
|
|
||||||
echo "\$RPC_PROVIDER not set!"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
unset CONFINI_DIR
|
|
||||||
|
|
||||||
if [ ! -z "$DEV_USE_DOCKER_WAIT_SCRIPT" ]; then
|
|
||||||
IFS=: read -a p <<< "$RPC_PROVIDER"
|
|
||||||
read -i "/" rpc_provider_port <<< "${p[2]}"
|
|
||||||
rpc_provider_host=${p[1]:2}
|
|
||||||
echo "waiting for provider host $rpc_provider_host port $rpc_provider_port..."
|
|
||||||
./wait-for-it.sh "$rpc_provider_host:$rpc_provider_port" -t $WAIT_FOR_TIMEOUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ "$TOKEN_TYPE" == "giftable_erc20_token" ]; then
|
|
||||||
if [ -z "$TOKEN_SYMBOL" ]; then
|
|
||||||
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
|
|
||||||
TOKEN_SYMBOL="GFT"
|
|
||||||
TOKEN_NAME="Giftable Token"
|
|
||||||
elif [ -z "$TOKEN_NAME" ]; then
|
|
||||||
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
|
|
||||||
TOKEN_NAME=$TOKEN_SYMBOL
|
|
||||||
fi
|
|
||||||
>&2 echo deploying default token $TOKEN_TYPE
|
|
||||||
echo giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv
|
|
||||||
DEV_RESERVE_ADDRESS=`giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv`
|
|
||||||
elif [ "$TOKEN_TYPE" == "erc20_demurrage_token" ]; then
|
|
||||||
if [ -z "$TOKEN_SYMBOL" ]; then
|
|
||||||
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
|
|
||||||
TOKEN_SYMBOL="DET"
|
|
||||||
TOKEN_NAME="Demurrage Token"
|
|
||||||
elif [ -z "$TOKEN_NAME" ]; then
|
|
||||||
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
|
|
||||||
TOKEN_NAME=$TOKEN_SYMBOL
|
|
||||||
fi
|
|
||||||
>&2 echo deploying token $TOKEN_TYPE
|
|
||||||
if [ -z $TOKEN_SINK_ADDRESS ]; then
|
|
||||||
if [ ! -z $TOKEN_REDISTRIBUTION_PERIOD ]; then
|
|
||||||
>&2 echo -e "\033[;93mtoken sink address not set, so redistribution will be BURNED\033[;39m"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
DEV_RESERVE_ADDRESS=`erc20-demurrage-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL -vv -ww -s`
|
|
||||||
else
|
|
||||||
>&2 echo unknown token type $TOKEN_TYPE
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT"
|
|
||||||
giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -u -vv -s -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT
|
|
||||||
|
|
||||||
# Deploy address declarator registry
|
|
||||||
>&2 echo "deploy address declarator contract"
|
|
||||||
declarator_description=0x546869732069732074686520434943206e6574776f726b000000000000000000
|
|
||||||
DEV_DECLARATOR_ADDRESS=`eth-address-declarator-deploy -s -u -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv $declarator_description`
|
|
||||||
|
|
||||||
>&2 echo "deploy contract registry contract"
|
|
||||||
#CIC_REGISTRY_ADDRESS=`eth-contract-registry-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry -p $RPC_PROVIDER -vv -s -u -w`
|
|
||||||
CIC_REGISTRY_ADDRESS=`okota-contract-registry-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry --address-declarator $DEV_DECLARATOR_ADDRESS -p $RPC_PROVIDER -vv -s -u -w`
|
|
||||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier ContractRegistry $CIC_REGISTRY_ADDRESS
|
|
||||||
|
|
||||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier AddressDeclarator $DEV_DECLARATOR_ADDRESS
|
|
||||||
>&2 echo "deploy account index contract"
#DEV_ACCOUNT_INDEX_ADDRESS=`eth-accounts-index-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -y $WALLET_KEY_FILE -vv -s -u -w`
DEV_ACCOUNT_INDEX_ADDRESS=`okota-accounts-index-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -y $WALLET_KEY_FILE -vv -s -u -w --address-declarator $DEV_DECLARATOR_ADDRESS --token-address $DEV_RESERVE_ADDRESS`
#>&2 echo "add deployer address as account index writer"
#eth-accounts-index-writer $fee_price_arg -s -u -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -e $DEV_ACCOUNT_INDEX_ADDRESS -ww -vv $debug $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier AccountRegistry $DEV_ACCOUNT_INDEX_ADDRESS

# Deploy transfer authorization contract
>&2 echo "deploy transfer auth contract"
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy $gas_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv`
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS

# Deploy token index contract
>&2 echo "deploy token index contract"
#DEV_TOKEN_INDEX_ADDRESS=`eth-token-index-deploy -s -u $fee_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv`
DEV_TOKEN_INDEX_ADDRESS=`okota-token-index-deploy -s -u $fee_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv --address-declarator $DEV_DECLARATOR_ADDRESS`
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS
>&2 echo "add reserve token to token index"
eth-token-index-add $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv -e $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS
# Sarafu faucet contract
>&2 echo "deploy token faucet contract"
DEV_FAUCET_ADDRESS=`sarafu-faucet-deploy $fee_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv --account-index-address $DEV_ACCOUNT_INDEX_ADDRESS $DEV_RESERVE_ADDRESS -s`

>&2 echo "set token faucet amount"
sarafu-faucet-set $fee_price_arg -w -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -e $DEV_FAUCET_ADDRESS -vv -s --fee-limit 100000 $DEV_FAUCET_AMOUNT

>&2 echo "register faucet in registry"
eth-contract-registry-set -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier Faucet $DEV_FAUCET_ADDRESS

>&2 echo "set faucet as token minter"
giftable-token-minter -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $DEV_RESERVE_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv $DEV_FAUCET_ADDRESS
#echo "export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL" >> ${DEV_DATA_DIR}/env_reset
|
|
||||||
export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL
|
|
||||||
|
|
||||||
echo "Writing env_reset file ..."
|
|
||||||
|
|
||||||
echo "export CIC_REGISTRY_ADDRESS=$CIC_REGISTRY_ADDRESS
|
|
||||||
export CIC_DEFAULT_TOKEN_SYMBOL=$CIC_DEFAULT_TOKEN_SYMBOL
|
|
||||||
export TOKEN_NAME=$TOKEN_NAME
|
|
||||||
" >> "${DEV_DATA_DIR}"/env_reset
|
|
||||||
|
|
||||||
set +a
|
|
||||||
set +e
|
|
||||||
|
|
||||||
exec "$@"
|
|
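# Illustrative usage (not part of the original script): downstream scripts pick the
# exported values up again by sourcing the file, as the custodial seeding script does:
#
#   source ${DEV_DATA_DIR}/env_reset
#   >&2 echo "using registry $CIC_REGISTRY_ADDRESS and token $CIC_DEFAULT_TOKEN_SYMBOL"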
@ -1,31 +1,54 @@
 #! /bin/bash
 
->&2 echo -e "\033[;96mRUNNING\033[;39m configurations"
-./config.sh
-if [ $? -ne "0" ]; then
-	>&2 echo -e "\033[;31mFAILED\033[;39m configurations"
-	exit 1;
-fi
->&2 echo -e "\033[;32mSUCCEEDED\033[;39m configurations"
-
-if [[ $((RUN_MASK & 1)) -eq 1 ]]
-then
-	>&2 echo -e "\033[;96mRUNNING\033[;39m RUN_MASK 1 - contract deployment"
-	./reset.sh
-	if [ $? -ne "0" ]; then
-		>&2 echo -e "\033[;31mFAILED\033[;39m RUN_MASK 1 - contract deployment"
-		exit 1;
-	fi
-	>&2 echo -e "\033[;32mSUCCEEDED\033[;39m RUN_MASK 1 - contract deployment"
-fi
-
-if [[ $((RUN_MASK & 2)) -eq 2 ]]
-then
-	>&2 echo -e "\033[;96mRUNNING\033[;39m RUN_MASK 2 - custodial service initialization"
-	./seed_cic_eth.sh
-	if [ $? -ne "0" ]; then
-		>&2 echo -e "\033[;31mFAILED\033[;39m RUN_MASK 2 - custodial service initialization"
-		exit 1;
-	fi
-	>&2 echo -e "\033[;32mSUCCEEDED\033[;39m RUN_MASK 2 - custodial service initialization"
-fi
+. ./util.sh
+
+set -a
+DEV_DEBUG_FLAG=""
+DEV_DEBUG_LEVEL=${DEV_DEBUG_LEVEL=0}
+if [ $DEV_DEBUG_LEVEL -eq 1 ]; then
+	DEV_DEBUG_FLAG="-v"
+elif [ $DEV_DEBUG_LEVEL -gt 1 ]; then
+	DEV_DEBUG_FLAG="-vv"
+fi
+
+# disable override of config schema directory
+unset CONFINI_DIR
+
+set +a
+
+LAST_BIT_POS=5
+files=(deploy_contract_root deploy_contract_instance deploy_token init_custodial data_seeding)
+description=("global contracts" "instance specific contracts" "token deployment" "initialize custodial engine" "data seeding for development")
+
+>&2 echo -e "\033[;96mRUNNING configurations\033[;39m"
+source ./config.sh
+if [ $? -ne "0" ]; then
+	>&2 echo -e "\033[;31mFAILED configurations\033[;39m"
+	exit 1;
+fi
+>&2 echo -e "\033[;32mSUCCEEDED configurations\033[;39m"
+
+>&2 echo -e "\033[;96mInitial configuration state\033[;39m"
+confini-dump --schema-dir ./config
+
+clear_pending_tx_hashes
+
+bit=1
+for ((i=0; i<$LAST_BIT_POS; i++)); do
+	runlevel="RUNLEVEL $bit"
+	if [[ $((RUN_MASK & $bit)) -eq ${bit} ]]; then
+		s="$runlevel - ${description[$i]}"
+		>&2 echo -e "\033[;96mRUNNING $s\033[;39m"
+		source $((i+1))_${files[$i]}.sh
+		if [ $? -ne "0" ]; then
+			>&2 echo -e "\033[;31mFAILED $s\033[;39m"
+			exit 1;
+		fi
+		>&2 echo -e "\033[;32mSUCCEEDED $s\033[;39m"
+		>&2 echo -e "\033[;96mConfiguration state after $runlevel execution\033[;39m"
+		confini-dump --schema-dir ./config
+	fi
+	bit=$((bit*2))
+done
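# Illustrative invocation (not part of the diff): RUN_MASK is a bit mask over the five
# runlevels in the files array, so a mask of 7 (1|2|4) sources 1_deploy_contract_root.sh,
# 2_deploy_contract_instance.sh and 3_deploy_token.sh in that order. Assuming the
# wrapper above is saved as run.sh:
#
#   RUN_MASK=7 ./run.sh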
@ -1,88 +0,0 @@
#!/bin/bash

# defaults
source ${DEV_DATA_DIR}/env_reset
cat ${DEV_DATA_DIR}/env_reset

# Debug flag
debug='-vv'
empty_config_dir=$CONFINI_DIR/empty

set -e
set -a

unset CONFINI_DIR

# get required addresses from registries
token_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw TokenRegistry`
account_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw AccountRegistry`
reserve_address=`eth-token-index-list -i $CHAIN_SPEC -u -p $RPC_PROVIDER -e $token_index_address -vv --raw $CIC_DEFAULT_TOKEN_SYMBOL`

>&2 echo "Token registry: $token_index_address"
>&2 echo "Account registry: $account_index_address"
>&2 echo "Reserve address: $reserve_address ($TOKEN_SYMBOL)"

>&2 echo "create account for gas gifter"
old_gas_provider=$DEV_ETH_ACCOUNT_GAS_PROVIDER
#DEV_ETH_ACCOUNT_GAS_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create --redis-timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
DEV_ETH_ACCOUNT_GAS_GIFTER=`cic-eth-create --redis-timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
cic-eth-tag -i $CHAIN_SPEC GAS_GIFTER $DEV_ETH_ACCOUNT_GAS_GIFTER


>&2 echo "create account for sarafu gifter"
DEV_ETH_ACCOUNT_SARAFU_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
cic-eth-tag -i $CHAIN_SPEC SARAFU_GIFTER $DEV_ETH_ACCOUNT_SARAFU_GIFTER

>&2 echo "create account for approval escrow owner"
DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
cic-eth-tag -i $CHAIN_SPEC TRANSFER_AUTHORIZATION_OWNER $DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER

#>&2 echo "create account for faucet owner"
#DEV_ETH_ACCOUNT_FAUCET_OWNER=`cic-eth-create $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
#echo DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_FAUCET_OWNER >> $env_out_file
#cic-eth-tag FAUCET_GIFTER $DEV_ETH_ACCOUNT_FAUCET_OWNER

>&2 echo "create account for accounts index writer"
DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
cic-eth-tag -i $CHAIN_SPEC ACCOUNT_REGISTRY_WRITER $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
>&2 echo "add acccounts index writer account as writer on contract"
|
|
||||||
#eth-accounts-index-writer -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $account_index_address -ww $debug $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
|
|
||||||
|
|
||||||
# Transfer gas to custodial gas provider adddress
|
|
||||||
_CONFINI_DIR=$CONFINI_DIR
|
|
||||||
unset CONFINI_DIR
|
|
||||||
>&2 echo gift gas to gas gifter
|
|
||||||
>&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_GAS_GIFTER $DEV_GAS_AMOUNT
|
|
||||||
|
|
||||||
>&2 echo gift gas to sarafu token owner
|
|
||||||
>&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER $DEV_GAS_AMOUNT
|
|
||||||
|
|
||||||
>&2 echo gift gas to account index owner
|
|
||||||
>&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER $DEV_GAS_AMOUNT
|
|
||||||
|
|
||||||
|
|
||||||
# Send token to token creator
|
|
||||||
>&2 echo "gift tokens to sarafu owner"
|
|
||||||
echo "giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $DEV_TOKEN_AMOUNT"
|
|
||||||
>&2 giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $DEV_TOKEN_AMOUNT
|
|
||||||
|
|
||||||
# Send token to token gifter
|
|
||||||
>&2 echo "gift tokens to keystore address"
|
|
||||||
>&2 giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER -w $debug $DEV_TOKEN_AMOUNT
|
|
||||||
|
|
||||||
>&2 echo "set sarafu token to reserve token (temporarily while bancor contracts are not connected)"
|
|
||||||
export DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS
|
|
||||||
|
|
||||||
# Transfer tokens to gifter address
|
|
||||||
>&2 echo "transfer tokens to token gifter address"
|
|
||||||
>&2 erc20-transfer -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER --fee-limit 100000 -e $reserve_address -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${DEV_TOKEN_AMOUNT:0:-1}
|
|
||||||
|
|
||||||
# Remove the SEND (8), QUEUE (16) and INIT (2) locks (or'ed), set by default at migration
|
|
||||||
cic-eth-ctl -vv -i $CHAIN_SPEC unlock INIT
|
|
||||||
cic-eth-ctl -vv -i $CHAIN_SPEC unlock SEND
|
|
||||||
cic-eth-ctl -vv -i $CHAIN_SPEC unlock QUEUE
|
|
||||||
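# Illustrative arithmetic (not part of the original script): the lock flags are bit
# values (INIT=2, SEND=8, QUEUE=16), so the combined mask cleared above is:
#
#   echo $((2 | 8 | 16))   # prints 26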
#confini-dump --schema-module chainlib.eth.data.config --schema-module cic_eth.data.config --schema-dir ./config

set +a
set +e
42
apps/contract-migration/util.sh
Normal file
@ -0,0 +1,42 @@
function must_address() {
	if [[ ! "$1" =~ ^(0x)?[0-9a-fA-F]{40}$ ]]; then
		>&2 echo -e "\033[;31mvalue '$1' for $2 is not an address\033[;39m"
		exit 1
	fi
}


function must_hash_256() {
	if [[ ! "$1" =~ ^(0x)?[0-9a-fA-F]{64}$ ]]; then
		>&2 echo -e "\033[;31mvalue '$1' for $2 is not a 256-bit digest\033[;39m"
		exit 1
	fi
}
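# Illustrative usage (not part of util.sh): both helpers take the value to check and a
# label used in the error message, e.g. with a made-up address:
#
#   must_address "0x000000000000000000000000000000000000dEaD" "CIC_REGISTRY_ADDRESS"
#   must_hash_256 "$tx_hash" "transaction hash"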
function must_eth_rpc() {
	if [ -z "${RPC_PROVIDER}" ]; then
		echo "\$RPC_PROVIDER not set!"
		exit 1
	fi
	# Wait for the backend to be up, if we know where it is.
	if [ ! -z "$DEV_USE_DOCKER_WAIT_SCRIPT" ]; then
		WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
		IFS=: read -a p <<< "$RPC_PROVIDER"
		read -i "/" rpc_provider_port <<< "${p[2]}"
		rpc_provider_host=${p[1]:2}
		echo "waiting for provider host $rpc_provider_host port $rpc_provider_port..."
		./wait-for-it.sh "$rpc_provider_host:$rpc_provider_port" -t $WAIT_FOR_TIMEOUT
	fi
}
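# Illustrative walk-through (not part of util.sh): with RPC_PROVIDER=http://localhost:8545,
# the colon split above yields p[0]="http", p[1]="//localhost" and p[2]="8545", so the host
# becomes "localhost" and the port "8545" before wait-for-it.sh polls the endpoint:
#
#   RPC_PROVIDER=http://localhost:8545 DEV_USE_DOCKER_WAIT_SCRIPT=1 must_eth_rpc   # after . ./util.sh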
function clear_pending_tx_hashes() {
	truncate -s 0 $DEV_DATA_DIR/hashes
}


function add_pending_tx_hash() {
	must_hash_256 $1
	echo $1 >> $DEV_DATA_DIR/hashes
}
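# Illustrative usage (not part of util.sh): a deployment step could record the hash of a
# submitted transaction so it can be inspected later, assuming a tool that prints the
# transaction hash on stdout (hypothetical command name):
#
#   tx_hash=$(some-deploy-tool ...)
#   add_pending_tx_hash $tx_hash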