Merge branch 'lash/split-migration' into 'master'

feat: Split and improve contract migration steps

See merge request grassrootseconomics/cic-internal-integration!292

commit d8f51c5bdd

apps/cic-base-os/aux/wait-for-it/.gitignore (new file, vendored, 3 lines)
@@ -0,0 +1,3 @@
**/*.pyc
.pydevproject
/vendor/

apps/cic-base-os/aux/wait-for-it/.travis.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
language: python
python:
- "2.7"

script:
- python test/wait-for-it.py


apps/cic-base-os/aux/wait-for-it/LICENSE (new file, 20 lines)
@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2016 Giles Hall

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

apps/cic-base-os/aux/wait-for-it/README.md (new file, 75 lines)
@@ -0,0 +1,75 @@
# wait-for-it

`wait-for-it.sh` is a pure bash script that will wait on the availability of a
host and TCP port. It is useful for synchronizing the spin-up of
interdependent services, such as linked docker containers. Since it is a pure
bash script, it does not have any external dependencies.

## Usage

```text
wait-for-it.sh host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST       Host or IP under test
-p PORT | --port=PORT       TCP port under test
                            Alternatively, you specify the host and port as host:port
-s | --strict               Only execute subcommand if the test succeeds
-q | --quiet                Don't output any status messages
-t TIMEOUT | --timeout=TIMEOUT
                            Timeout in seconds, zero for no timeout
-- COMMAND ARGS             Execute command with args after the test finishes
```

## Examples

For example, let's test to see if we can access port 80 on `www.google.com`,
and if it is available, echo the message `google is up`.

```text
$ ./wait-for-it.sh www.google.com:80 -- echo "google is up"
wait-for-it.sh: waiting 15 seconds for www.google.com:80
wait-for-it.sh: www.google.com:80 is available after 0 seconds
google is up
```

You can set your own timeout with the `-t` or `--timeout=` option. Setting
the timeout value to 0 will disable the timeout:

```text
$ ./wait-for-it.sh -t 0 www.google.com:80 -- echo "google is up"
wait-for-it.sh: waiting for www.google.com:80 without a timeout
wait-for-it.sh: www.google.com:80 is available after 0 seconds
google is up
```

The subcommand will be executed regardless if the service is up or not. If you
wish to execute the subcommand only if the service is up, add the `--strict`
argument. In this example, we will test port 81 on `www.google.com` which will
fail:

```text
$ ./wait-for-it.sh www.google.com:81 --timeout=1 --strict -- echo "google is up"
wait-for-it.sh: waiting 1 seconds for www.google.com:81
wait-for-it.sh: timeout occurred after waiting 1 seconds for www.google.com:81
wait-for-it.sh: strict mode, refusing to execute subprocess
```

If you don't want to execute a subcommand, leave off the `--` argument. This
way, you can test the exit condition of `wait-for-it.sh` in your own scripts,
and determine how to proceed:

```text
$ ./wait-for-it.sh www.google.com:80
wait-for-it.sh: waiting 15 seconds for www.google.com:80
wait-for-it.sh: www.google.com:80 is available after 0 seconds
$ echo $?
0
$ ./wait-for-it.sh www.google.com:81
wait-for-it.sh: waiting 15 seconds for www.google.com:81
wait-for-it.sh: timeout occurred after waiting 15 seconds for www.google.com:81
$ echo $?
124
```

## Community

*Debian*: There is a [Debian package](https://tracker.debian.org/pkg/wait-for-it).
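
The check the vendored script performs is simply "can a TCP connection be opened to host:port". For readers who want the same probe from Python tooling rather than a subprocess, a rough equivalent of that single probe (not of the script's timeout, --strict or subcommand handling) might look like the sketch below; the function name is illustrative only:

```python
import socket


def tcp_port_open(host: str, port: int, timeout: float = 1.0) -> bool:
    # One probe, roughly what wait-for-it.sh does with `nc -z` or /dev/tcp:
    # success means the port accepted a TCP connection within the timeout.
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False


if __name__ == '__main__':
    print(tcp_port_open('www.google.com', 80))
```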

apps/cic-base-os/aux/wait-for-it/wait-for-it.sh (new executable file, 182 lines)
@@ -0,0 +1,182 @@
|
||||
#!/usr/bin/env bash
|
||||
# Use this script to test if a given TCP host/port are available
|
||||
|
||||
WAITFORIT_cmdname=${0##*/}
|
||||
|
||||
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
|
||||
|
||||
usage()
|
||||
{
|
||||
cat << USAGE >&2
|
||||
Usage:
|
||||
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
USAGE
|
||||
exit 1
|
||||
}
|
||||
|
||||
wait_for()
|
||||
{
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
else
|
||||
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
|
||||
fi
|
||||
WAITFORIT_start_ts=$(date +%s)
|
||||
while :
|
||||
do
|
||||
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
|
||||
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
|
||||
WAITFORIT_result=$?
|
||||
else
|
||||
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
|
||||
WAITFORIT_result=$?
|
||||
fi
|
||||
if [[ $WAITFORIT_result -eq 0 ]]; then
|
||||
WAITFORIT_end_ts=$(date +%s)
|
||||
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
return $WAITFORIT_result
|
||||
}
|
||||
|
||||
wait_for_wrapper()
|
||||
{
|
||||
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
|
||||
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
else
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
fi
|
||||
WAITFORIT_PID=$!
|
||||
trap "kill -INT -$WAITFORIT_PID" INT
|
||||
wait $WAITFORIT_PID
|
||||
WAITFORIT_RESULT=$?
|
||||
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
fi
|
||||
return $WAITFORIT_RESULT
|
||||
}
|
||||
|
||||
# process arguments
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case "$1" in
|
||||
*:* )
|
||||
WAITFORIT_hostport=(${1//:/ })
|
||||
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
|
||||
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
|
||||
shift 1
|
||||
;;
|
||||
--child)
|
||||
WAITFORIT_CHILD=1
|
||||
shift 1
|
||||
;;
|
||||
-q | --quiet)
|
||||
WAITFORIT_QUIET=1
|
||||
shift 1
|
||||
;;
|
||||
-s | --strict)
|
||||
WAITFORIT_STRICT=1
|
||||
shift 1
|
||||
;;
|
||||
-h)
|
||||
WAITFORIT_HOST="$2"
|
||||
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--host=*)
|
||||
WAITFORIT_HOST="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-p)
|
||||
WAITFORIT_PORT="$2"
|
||||
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--port=*)
|
||||
WAITFORIT_PORT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-t)
|
||||
WAITFORIT_TIMEOUT="$2"
|
||||
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--timeout=*)
|
||||
WAITFORIT_TIMEOUT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
WAITFORIT_CLI=("$@")
|
||||
break
|
||||
;;
|
||||
--help)
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
echoerr "Unknown argument: $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
|
||||
echoerr "Error: you need to provide a host and port to test."
|
||||
usage
|
||||
fi
|
||||
|
||||
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
|
||||
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
|
||||
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
|
||||
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
|
||||
|
||||
# Check to see if timeout is from busybox?
|
||||
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
|
||||
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
|
||||
|
||||
WAITFORIT_BUSYTIMEFLAG=""
|
||||
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
|
||||
WAITFORIT_ISBUSY=1
|
||||
# Check if busybox timeout uses -t flag
|
||||
# (recent Alpine versions don't support -t anymore)
|
||||
if timeout &>/dev/stdout | grep -q -e '-t '; then
|
||||
WAITFORIT_BUSYTIMEFLAG="-t"
|
||||
fi
|
||||
else
|
||||
WAITFORIT_ISBUSY=0
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
exit $WAITFORIT_RESULT
|
||||
else
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
wait_for_wrapper
|
||||
WAITFORIT_RESULT=$?
|
||||
else
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CLI != "" ]]; then
|
||||
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
exec "${WAITFORIT_CLI[@]}"
|
||||
else
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|

apps/cic-cache/aux/wait-for-it/.gitignore (new file, vendored, 3 lines)
@@ -0,0 +1,3 @@
**/*.pyc
.pydevproject
/vendor/

apps/cic-cache/aux/wait-for-it/.travis.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
language: python
python:
- "2.7"

script:
- python test/wait-for-it.py


apps/cic-cache/aux/wait-for-it/LICENSE (new file, 20 lines)
@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2016 Giles Hall

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

apps/cic-cache/aux/wait-for-it/README.md (new file, 75 lines)
@@ -0,0 +1,75 @@
|
||||
# wait-for-it
|
||||
|
||||
`wait-for-it.sh` is a pure bash script that will wait on the availability of a
|
||||
host and TCP port. It is useful for synchronizing the spin-up of
|
||||
interdependent services, such as linked docker containers. Since it is a pure
|
||||
bash script, it does not have any external dependencies.
|
||||
|
||||
## Usage
|
||||
|
||||
```text
|
||||
wait-for-it.sh host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
For example, let's test to see if we can access port 80 on `www.google.com`,
|
||||
and if it is available, echo the message `google is up`.
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:80 -- echo "google is up"
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:80
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
google is up
|
||||
```
|
||||
|
||||
You can set your own timeout with the `-t` or `--timeout=` option. Setting
|
||||
the timeout value to 0 will disable the timeout:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh -t 0 www.google.com:80 -- echo "google is up"
|
||||
wait-for-it.sh: waiting for www.google.com:80 without a timeout
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
google is up
|
||||
```
|
||||
|
||||
The subcommand will be executed regardless if the service is up or not. If you
|
||||
wish to execute the subcommand only if the service is up, add the `--strict`
|
||||
argument. In this example, we will test port 81 on `www.google.com` which will
|
||||
fail:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:81 --timeout=1 --strict -- echo "google is up"
|
||||
wait-for-it.sh: waiting 1 seconds for www.google.com:81
|
||||
wait-for-it.sh: timeout occurred after waiting 1 seconds for www.google.com:81
|
||||
wait-for-it.sh: strict mode, refusing to execute subprocess
|
||||
```
|
||||
|
||||
If you don't want to execute a subcommand, leave off the `--` argument. This
|
||||
way, you can test the exit condition of `wait-for-it.sh` in your own scripts,
|
||||
and determine how to proceed:
|
||||
|
||||
```text
|
||||
$ ./wait-for-it.sh www.google.com:80
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:80
|
||||
wait-for-it.sh: www.google.com:80 is available after 0 seconds
|
||||
$ echo $?
|
||||
0
|
||||
$ ./wait-for-it.sh www.google.com:81
|
||||
wait-for-it.sh: waiting 15 seconds for www.google.com:81
|
||||
wait-for-it.sh: timeout occurred after waiting 15 seconds for www.google.com:81
|
||||
$ echo $?
|
||||
124
|
||||
```
|
||||
|
||||
## Community
|
||||
|
||||
*Debian*: There is a [Debian package](https://tracker.debian.org/pkg/wait-for-it).
|

apps/cic-cache/aux/wait-for-it/wait-for-it.sh (new executable file, 182 lines)
@@ -0,0 +1,182 @@
|
||||
#!/usr/bin/env bash
|
||||
# Use this script to test if a given TCP host/port are available
|
||||
|
||||
WAITFORIT_cmdname=${0##*/}
|
||||
|
||||
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
|
||||
|
||||
usage()
|
||||
{
|
||||
cat << USAGE >&2
|
||||
Usage:
|
||||
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
|
||||
-h HOST | --host=HOST Host or IP under test
|
||||
-p PORT | --port=PORT TCP port under test
|
||||
Alternatively, you specify the host and port as host:port
|
||||
-s | --strict Only execute subcommand if the test succeeds
|
||||
-q | --quiet Don't output any status messages
|
||||
-t TIMEOUT | --timeout=TIMEOUT
|
||||
Timeout in seconds, zero for no timeout
|
||||
-- COMMAND ARGS Execute command with args after the test finishes
|
||||
USAGE
|
||||
exit 1
|
||||
}
|
||||
|
||||
wait_for()
|
||||
{
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
else
|
||||
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
|
||||
fi
|
||||
WAITFORIT_start_ts=$(date +%s)
|
||||
while :
|
||||
do
|
||||
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
|
||||
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
|
||||
WAITFORIT_result=$?
|
||||
else
|
||||
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
|
||||
WAITFORIT_result=$?
|
||||
fi
|
||||
if [[ $WAITFORIT_result -eq 0 ]]; then
|
||||
WAITFORIT_end_ts=$(date +%s)
|
||||
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
return $WAITFORIT_result
|
||||
}
|
||||
|
||||
wait_for_wrapper()
|
||||
{
|
||||
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
|
||||
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
else
|
||||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
|
||||
fi
|
||||
WAITFORIT_PID=$!
|
||||
trap "kill -INT -$WAITFORIT_PID" INT
|
||||
wait $WAITFORIT_PID
|
||||
WAITFORIT_RESULT=$?
|
||||
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
|
||||
fi
|
||||
return $WAITFORIT_RESULT
|
||||
}
|
||||
|
||||
# process arguments
|
||||
while [[ $# -gt 0 ]]
|
||||
do
|
||||
case "$1" in
|
||||
*:* )
|
||||
WAITFORIT_hostport=(${1//:/ })
|
||||
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
|
||||
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
|
||||
shift 1
|
||||
;;
|
||||
--child)
|
||||
WAITFORIT_CHILD=1
|
||||
shift 1
|
||||
;;
|
||||
-q | --quiet)
|
||||
WAITFORIT_QUIET=1
|
||||
shift 1
|
||||
;;
|
||||
-s | --strict)
|
||||
WAITFORIT_STRICT=1
|
||||
shift 1
|
||||
;;
|
||||
-h)
|
||||
WAITFORIT_HOST="$2"
|
||||
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--host=*)
|
||||
WAITFORIT_HOST="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-p)
|
||||
WAITFORIT_PORT="$2"
|
||||
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--port=*)
|
||||
WAITFORIT_PORT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
-t)
|
||||
WAITFORIT_TIMEOUT="$2"
|
||||
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
|
||||
shift 2
|
||||
;;
|
||||
--timeout=*)
|
||||
WAITFORIT_TIMEOUT="${1#*=}"
|
||||
shift 1
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
WAITFORIT_CLI=("$@")
|
||||
break
|
||||
;;
|
||||
--help)
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
echoerr "Unknown argument: $1"
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
|
||||
echoerr "Error: you need to provide a host and port to test."
|
||||
usage
|
||||
fi
|
||||
|
||||
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
|
||||
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
|
||||
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
|
||||
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
|
||||
|
||||
# Check to see if timeout is from busybox?
|
||||
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
|
||||
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
|
||||
|
||||
WAITFORIT_BUSYTIMEFLAG=""
|
||||
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
|
||||
WAITFORIT_ISBUSY=1
|
||||
# Check if busybox timeout uses -t flag
|
||||
# (recent Alpine versions don't support -t anymore)
|
||||
if timeout &>/dev/stdout | grep -q -e '-t '; then
|
||||
WAITFORIT_BUSYTIMEFLAG="-t"
|
||||
fi
|
||||
else
|
||||
WAITFORIT_ISBUSY=0
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
exit $WAITFORIT_RESULT
|
||||
else
|
||||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
|
||||
wait_for_wrapper
|
||||
WAITFORIT_RESULT=$?
|
||||
else
|
||||
wait_for
|
||||
WAITFORIT_RESULT=$?
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $WAITFORIT_CLI != "" ]]; then
|
||||
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
|
||||
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
||||
exec "${WAITFORIT_CLI[@]}"
|
||||
else
|
||||
exit $WAITFORIT_RESULT
|
||||
fi
|
@@ -8,6 +8,7 @@ import base64
import confini

# local imports
import cic_cache.cli
from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase
from cic_cache.runnable.daemons.query import (

@@ -23,26 +24,17 @@ rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')

config_dir = os.path.join('/usr/local/etc/cic-cache')

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
# process args
arg_flags = cic_cache.cli.argflag_std_base
local_arg_flags = cic_cache.cli.argflag_local_task
argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(args.c, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))
# process config
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)

# connect to database
dsn = dsn_from_config(config)
SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
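
Across these hunks (and the matching worker hunk below), the hand-rolled argparse/confini boilerplate is replaced by the shared cic_cache.cli helpers. A minimal sketch of the resulting bootstrap, assembled only from the calls visible in this diff:

```python
# Sketch of the new startup pattern; assumes the cic_cache.cli helpers shown
# in the diff (ArgumentParser, Config, argflag_std_base, argflag_local_task).
import cic_cache.cli
from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase

arg_flags = cic_cache.cli.argflag_std_base          # shared base flags
local_arg_flags = cic_cache.cli.argflag_local_task  # flags for task daemons

argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()

# one call replaces the old confini.Config(...)/process()/censor() sequence
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)

dsn = dsn_from_config(config)
SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
```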

@@ -9,6 +9,7 @@ import celery
import confini

# local imports
import cic_cache.cli
from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase
from cic_cache.tasks.tx import *

@@ -16,35 +17,20 @@ from cic_cache.tasks.tx import *
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

config_dir = os.path.join('/usr/local/etc/cic-cache')


argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('-q', type=str, default='cic-cache', help='queue name for worker tasks')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')

# process args
arg_flags = cic_cache.cli.argflag_std_base
local_arg_flags = cic_cache.cli.argflag_local_task
argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(args.c, args.env_prefix)
config.process()
# process config
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)

# connect to database
dsn = dsn_from_config(config)
SessionBase.connect(dsn)

# verify database connection with minimal sanity query
#session = SessionBase.create_session()
#session.execute('select version_num from alembic_version')
#session.close()

# set up celery
current_app = celery.Celery(__name__)

@@ -87,9 +73,9 @@ def main():
    elif args.v:
        argv.append('--loglevel=INFO')
    argv.append('-Q')
    argv.append(args.q)
    argv.append(config.get('CELERY_QUEUE'))
    argv.append('-n')
    argv.append(args.q)
    argv.append(config.get('CELERY_QUEUE'))

    current_app.worker_main(argv)

@@ -40,7 +40,7 @@ logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

# process args
arg_flags = cic_cache.cli.argflag_std_read
arg_flags = cic_cache.cli.argflag_std_base
local_arg_flags = cic_cache.cli.argflag_local_sync
argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)

@@ -1,19 +1,20 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
ARG DOCKER_REGISTRY=registry.gitlab.com/grassrootseconomics

# RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e

COPY requirements.txt .
#RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
#RUN pip install $pip_extra_index_url_flag .
#RUN pip install .[server]

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
RUN apt-get install libffi-dev -y

ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG EXTRA_PIP_ARGS=""
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
    pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
ARG PIP_INDEX_URL="https://pypi.org/simple"

RUN pip install --index-url $PIP_INDEX_URL \
    --pre \
    --force-reinstall \
    --no-cache \
    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
    -r requirements.txt

COPY . .

@@ -23,10 +24,10 @@ RUN python setup.py install
# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-cache/
#COPY config/ /usr/local/etc/cic-cache/

# for db migrations
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
COPY ./aux/wait-for-it/wait-for-it.sh ./
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/

COPY /docker/start_tracker.sh ./start_tracker.sh

@@ -1,5 +1,5 @@
celery==4.4.7
erc20-demurrage-token~=0.0.5a3
cic-eth-registry~=0.6.1a5
chainlib~=0.0.9rc3
cic_eth~=0.12.4a9
cic-eth-registry~=0.6.1a6
chainlib~=0.0.9rc1
cic_eth~=0.12.4a11

@@ -1,6 +1,6 @@
[metadata]
name = cic-eth-aux-erc20-demurrage-token
version = 0.0.2a6
version = 0.0.2a7
description = cic-eth tasks supporting erc20 demurrage token
author = Louis Holbrook
author_email = dev@holbrook.no

@@ -1,5 +1,5 @@
SQLAlchemy==1.3.20
cic-eth-registry>=0.6.1a5,<0.7.0
cic-eth-registry>=0.6.1a6,<0.7.0
hexathon~=0.0.1a8
chainqueue>=0.0.4a6,<0.1.0
eth-erc20>=0.1.2a2,<0.2.0
|
||||
|
||||
# external imports
|
||||
import celery
|
||||
from cic_eth_registry import CICRegistry
|
||||
from chainlib.chain import ChainSpec
|
||||
from chainlib.eth.tx import unpack
|
||||
from chainlib.connection import RPCConnection
|
||||
|

@@ -76,7 +76,7 @@ arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_task
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use')
#argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use')
argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to perist all queue entry status changes to storage')
argparser.add_argument('--aux-all', action='store_true', help='include tasks from all submodules from the aux module path')
argparser.add_argument('--aux', action='append', type=str, default=[], help='add single submodule from the aux module path')

@@ -84,7 +84,7 @@ args = argparser.parse_args()

# process config
extra_args = {
    'default_token_symbol': 'CIC_DEFAULT_TOKEN_SYMBOL',
    # 'default_token_symbol': 'CIC_DEFAULT_TOKEN_SYMBOL',
    'aux_all': None,
    'aux': None,
    'trace_queue_status': 'TASKS_TRACE_QUEUE_STATUS',

@@ -187,6 +187,17 @@ elif len(args.aux) > 0:
        logg.info('aux module {} found in path {}'.format(v, aux_dir))
        aux.append(v)

default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
defaullt_token_address = None
if default_token_symbol:
    default_token_address = registry.by_name(default_token_symbol)
else:
    default_token_address = registry.by_name('DefaultToken')
    c = ERC20Token(chain_spec, conn, default_token_address)
    default_token_symbol = c.symbol
logg.info('found default token {} address {}'.format(default_token_symbol, default_token_address))
config.add(default_token_symbol, 'CIC_DEFAULT_TOKEN_SYMBOL', exists_ok=True)

for v in aux:
    mname = 'cic_eth_aux.' + v
    mod = importlib.import_module(mname)
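
The new block above drops the --default-token-symbol argument and resolves the default token at startup instead: if CIC_DEFAULT_TOKEN_SYMBOL is set, its address is looked up by symbol; otherwise the registry's 'DefaultToken' entry is used and the symbol is read back from the token contract. A hedged restatement of that fallback order, using only names appearing in the hunk (registry, chain_spec, conn and ERC20Token are the objects already in scope in the daemon):

```python
# Sketch of the resolution order introduced above, not a standalone program.
default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
if default_token_symbol:
    # explicit symbol configured: resolve its address from the registry
    default_token_address = registry.by_name(default_token_symbol)
else:
    # no symbol configured: fall back to the registry's 'DefaultToken' entry
    # and read the symbol from the token contract itself
    default_token_address = registry.by_name('DefaultToken')
    default_token_symbol = ERC20Token(chain_spec, conn, default_token_address).symbol
config.add(default_token_symbol, 'CIC_DEFAULT_TOKEN_SYMBOL', exists_ok=True)
```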

@@ -204,8 +215,8 @@ def main():
    argv.append('-n')
    argv.append(config.get('CELERY_QUEUE'))

    BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
    BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol)
    BaseTask.default_token_symbol = default_token_symbol
    BaseTask.default_token_address = default_token_address
    default_token = ERC20Token(chain_spec, conn, add_0x(BaseTask.default_token_address))
    default_token.load(conn)
    BaseTask.default_token_decimals = default_token.decimals

@@ -10,7 +10,7 @@ version = (
    0,
    12,
    4,
    'alpha.11',
    'alpha.13',
    )

version_object = semver.VersionInfo(

@@ -1,46 +1,36 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"

FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e

# Copy just the requirements and install....this _might_ give docker a hint on caching but we
# do load these all into setup.py later
# TODO can we take all the requirements out of setup.py and just do a pip install -r requirements.txt && python setup.py
#COPY cic-eth/requirements.txt .

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
ARG EXTRA_PIP_ARGS=""
#RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
# pip install --index-url https://pypi.org/simple \
# --force-reinstall \
# --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
# -r requirements.txt
ARG PIP_INDEX_URL=https://pypi.org/simple

RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
    pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY \
    --extra-index-url $EXTRA_INDEX_URL \
    $EXTRA_PIP_ARGS \
    cic-eth-aux-erc20-demurrage-token~=0.0.2a6
RUN apt-get install libffi-dev

RUN pip install --index-url $PIP_INDEX_URL \
    --pre \
    --force-reinstall \
    --no-cache \
    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
    cic-eth-aux-erc20-demurrage-token~=0.0.2a7


COPY *requirements.txt ./
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
    pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY \
    --extra-index-url $EXTRA_INDEX_URL \
    $EXTRA_PIP_ARGS \
RUN pip install --index-url $PIP_INDEX_URL \
    --pre \
    --force-reinstall \
    --no-cache \
    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
    -r requirements.txt \
    -r services_requirements.txt \
    -r admin_requirements.txt

# always install the latest signer
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
    pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY \
    --extra-index-url $EXTRA_INDEX_URL \
    $EXTRA_PIP_ARGS \
    crypto-dev-signer

COPY . .
RUN python setup.py install

@@ -53,7 +43,7 @@ RUN chmod 755 *.sh
# # ini files in config directory defines the configurable parameters for the application
# # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-eth/
#COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/

@@ -2,5 +2,6 @@

set -e
>&2 echo executing database migration
python scripts/migrate.py -c /usr/local/etc/cic-eth --migrations-dir /usr/local/share/cic-eth/alembic -vv
#python scripts/migrate.py -c /usr/local/etc/cic-eth --migrations-dir /usr/local/share/cic-eth/alembic -vv
python scripts/migrate.py --migrations-dir /usr/local/share/cic-eth/alembic -vv
set +e

@@ -1,4 +1,4 @@
celery==4.4.7
chainlib-eth>=0.0.9rc4,<0.1.0
chainlib-eth>=0.0.10a4,<0.1.0
semver==2.13.0
crypto-dev-signer>=0.4.15rc2,<0.5.0

@@ -1,5 +1,5 @@
chainqueue>=0.0.5a1,<0.1.0
chainsyncer[sql]>=0.0.6a3,<0.1.0
chainqueue>=0.0.6a1,<0.1.0
chainsyncer[sql]>=0.0.7a3,<0.1.0
alembic==1.4.2
confini>=0.3.6rc4,<0.5.0
redis==3.5.3

@@ -8,7 +8,7 @@ pycryptodome==3.10.1
liveness~=0.0.1a7
eth-address-index>=0.2.4a1,<0.3.0
eth-accounts-index>=0.1.2a3,<0.2.0
cic-eth-registry>=0.6.1a5,<0.7.0
cic-eth-registry>=0.6.1a6,<0.7.0
erc20-faucet>=0.3.2a2,<0.4.0
erc20-transfer-authorization>=0.3.5a2,<0.4.0
sarafu-faucet>=0.0.7a2,<0.1.0

@@ -1,6 +1,7 @@
[metadata]
name = cic-eth
version = attr: cic_eth.version.__version_string__
#version = attr: cic_eth.version.__version_string__
version = 0.12.4a13
description = CIC Network Ethereum interaction
author = Louis Holbrook
author_email = dev@holbrook.no

@@ -110,7 +110,7 @@ def test_tokens_noproof(
        custodial_roles,
        foo_token_declaration,
        bar_token_declaration,
        celery_worker,
        celery_session_worker,
        ):

    api = Api(str(default_chain_spec), queue=None, callback_param='foo')

@@ -1,6 +1,6 @@
crypto-dev-signer>=0.4.15rc2,<=0.4.15
chainqueue>=0.0.5a1,<0.1.0
cic-eth-registry>=0.6.1a5,<0.7.0
chainqueue>=0.0.5a3,<0.1.0
cic-eth-registry>=0.6.1a6,<0.7.0
redis==3.5.3
hexathon~=0.0.1a8
pycryptodome==3.10.1

@@ -1,16 +1,16 @@
# syntax = docker/dockerfile:1.2
#FROM node:15.3.0-alpine3.10
FROM node:lts-alpine3.14
FROM node:15.3.0-alpine3.10
#FROM node:lts-alpine3.14

WORKDIR /root

RUN apk add --no-cache postgresql bash

# copy the dependencies
COPY package.json package-lock.json .
RUN --mount=type=cache,mode=0755,target=/root/.npm \
    npm set cache /root/.npm && \
    npm ci
COPY package.json package-lock.json ./
#RUN --mount=type=cache,mode=0755,target=/root/.npm \
RUN npm set cache /root/.npm && \
    npm cache verify && \
    npm ci --verbose

COPY webpack.config.js .
COPY tsconfig.json .

@@ -2,21 +2,24 @@
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev

#RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
RUN apt-get install libffi-dev -y


ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
COPY requirements.txt .

RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
    pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
RUN pip install --index-url $PIP_INDEX_URL \
    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
    -r requirements.txt

COPY . .
RUN python setup.py install

COPY docker/*.sh .
RUN chmod +x *.sh
COPY docker/*.sh ./
RUN chmod +x /root/*.sh

# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables

apps/cic-signer/Dockerfile (new file, 23 lines)
@@ -0,0 +1,23 @@
ARG DOCKER_REGISTRY=registry.gitlab.com/grassrootseconomics

FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e as dev

WORKDIR /root

RUN apt-get install libffi-dev -y

COPY requirements.txt .

ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL="https://pypi.org/simple"
RUN pip install --index-url $PIP_INDEX_URL \
    --pre \
    --force-reinstall \
    --no-cache \
    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
    -r requirements.txt

COPY . .

#RUN chmod +x *.sh

apps/cic-signer/requirements.txt (new file, 1 line)
@@ -0,0 +1 @@
funga-eth[sql]>=0.5.1a1,<0.6.0

@@ -7,6 +7,7 @@ from typing import Optional
# third-party imports
from cic_eth.api import Api
from cic_eth_aux.erc20_demurrage_token.api import Api as DemurrageApi
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.transaction import from_wei

@@ -102,7 +103,7 @@ def get_cached_available_balance(blockchain_address: str) -> float:
    :rtype: float
    """
    identifier = bytes.fromhex(blockchain_address)
    key = cache_data_key(identifier, salt=':cic.balances')
    key = cache_data_key(identifier, salt=MetadataPointer.BALANCES)
    cached_balances = get_cached_data(key=key)
    if cached_balances:
        return calculate_available_balance(json.loads(cached_balances))

@@ -117,5 +118,5 @@ def get_cached_adjusted_balance(identifier: bytes):
    :return:
    :rtype:
    """
    key = cache_data_key(identifier, ':cic.adjusted_balance')
    key = cache_data_key(identifier, MetadataPointer.BALANCES_ADJUSTED)
    return get_cached_data(key)

@@ -7,6 +7,7 @@ from typing import Optional
import celery
from chainlib.hash import strip_0x
from cic_eth.api import Api
from cic_types.condiments import MetadataPointer

# local import
from cic_ussd.account.chain import Chain

@@ -53,7 +54,7 @@ def get_cached_statement(blockchain_address: str) -> bytes:
    :rtype: str
    """
    identifier = bytes.fromhex(strip_0x(blockchain_address))
    key = cache_data_key(identifier=identifier, salt=':cic.statement')
    key = cache_data_key(identifier=identifier, salt=MetadataPointer.STATEMENT)
    return get_cached_data(key=key)

@@ -5,6 +5,7 @@ from typing import Dict, Optional

# external imports
from cic_eth.api import Api
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.chain import Chain

@@ -23,7 +24,7 @@ def get_cached_default_token(chain_str: str) -> Optional[str]:
    :rtype:
    """
    logg.debug(f'Retrieving default token from cache for chain: {chain_str}')
    key = cache_data_key(identifier=chain_str.encode('utf-8'), salt=':cic.default_token_data')
    key = cache_data_key(identifier=chain_str.encode('utf-8'), salt=MetadataPointer.TOKEN_DEFAULT)
    return get_cached_data(key=key)

@@ -2,7 +2,8 @@
import hashlib
import logging

# third-party imports
# external imports
from cic_types.condiments import MetadataPointer
from redis import Redis

logg = logging.getLogger()

@@ -38,7 +39,7 @@ def get_cached_data(key: str):
    return cache.get(name=key)


def cache_data_key(identifier: bytes, salt: str):
def cache_data_key(identifier: bytes, salt: MetadataPointer):
    """
    :param identifier:
    :type identifier:

@@ -49,5 +50,5 @@ def cache_data_key(identifier: bytes, salt: str):
    """
    hash_object = hashlib.new("sha256")
    hash_object.update(identifier)
    hash_object.update(salt.encode(encoding="utf-8"))
    hash_object.update(salt.value.encode(encoding="utf-8"))
    return hash_object.digest().hex()
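
This hunk is the pivot for all the ':cic.*' changes in this merge: the cache salt becomes a MetadataPointer enum member from cic-types, and the key is hashed over the member's string value. A minimal, self-contained sketch of the derivation, using only what the hunks show (MetadataPointer.BALANCES is one of the members referenced elsewhere in this diff; the sample identifier is made up):

```python
import hashlib

from cic_types.condiments import MetadataPointer


def cache_data_key(identifier: bytes, salt: MetadataPointer) -> str:
    # sha256 over the raw identifier followed by the pointer's string value,
    # mirroring the updated cic_ussd.cache implementation above
    h = hashlib.new('sha256')
    h.update(identifier)
    h.update(salt.value.encode(encoding='utf-8'))
    return h.digest().hex()


# e.g. the balances cache key for an account address given as a hex string
key = cache_data_key(bytes.fromhex('12ab' * 10), MetadataPointer.BALANCES)
print(key)
```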

@@ -3,6 +3,7 @@ import json

# external imports
from cic_eth.api import Api
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.metadata import get_cached_preferred_language, parse_account_metadata

@@ -109,7 +110,7 @@ class Account(SessionBase):
        :rtype: str
        """
        identifier = bytes.fromhex(self.blockchain_address)
        key = cache_data_key(identifier, ':cic.person')
        key = cache_data_key(identifier, MetadataPointer.PERSON)
        account_metadata = get_cached_data(key)
        if not account_metadata:
            return self.phone_number

@@ -5,6 +5,7 @@ from datetime import datetime, timedelta

# external imports
import i18n.config
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.balance import (calculate_available_balance,

@@ -163,7 +164,7 @@ class MenuProcessor:
        token_symbol = get_default_token_symbol()
        blockchain_address = self.account.blockchain_address
        balances = get_balances(blockchain_address, chain_str, token_symbol, False)[0]
        key = cache_data_key(self.identifier, ':cic.balances')
        key = cache_data_key(self.identifier, MetadataPointer.BALANCES)
        cache_data(key, json.dumps(balances))
        available_balance = calculate_available_balance(balances)
        now = datetime.now()

@@ -173,7 +174,7 @@ class MenuProcessor:
        else:
            timestamp = int((now - timedelta(30)).timestamp())
        adjusted_balance = get_adjusted_balance(to_wei(int(available_balance)), chain_str, timestamp, token_symbol)
        key = cache_data_key(self.identifier, ':cic.adjusted_balance')
        key = cache_data_key(self.identifier, MetadataPointer.BALANCES_ADJUSTED)
        cache_data(key, json.dumps(adjusted_balance))

        query_statement(blockchain_address)

@@ -10,14 +10,14 @@ import i18n
import redis
from chainlib.chain import ChainSpec
from confini import Config
from cic_types.ext.metadata import Metadata
from cic_types.ext.metadata.signer import Signer

# local imports
from cic_ussd.account.chain import Chain
from cic_ussd.cache import Cache
from cic_ussd.db import dsn_from_config
from cic_ussd.db.models.base import SessionBase
from cic_ussd.metadata.signer import Signer
from cic_ussd.metadata.base import Metadata
from cic_ussd.phone_number import Support
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
from cic_ussd.validator import validate_presence

@@ -87,11 +87,8 @@ Signer.key_file_path = key_file_path
i18n.load_path.append(config.get('LOCALE_PATH'))
i18n.set('fallback', config.get('LOCALE_FALLBACK'))

chain_spec = ChainSpec(
    common_name=config.get('CIC_COMMON_NAME'),
    engine=config.get('CIC_ENGINE'),
    network_id=config.get('CIC_NETWORK_ID')
)
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))


Chain.spec = chain_spec
Support.phone_number = config.get('OFFICE_SUPPORT_PHONE')

@@ -12,6 +12,9 @@ import i18n
import redis
from chainlib.chain import ChainSpec
from confini import Config
from cic_types.condiments import MetadataPointer
from cic_types.ext.metadata import Metadata
from cic_types.ext.metadata.signer import Signer

# local imports
from cic_ussd.account.chain import Chain

@@ -25,8 +28,6 @@ from cic_ussd.files.local_files import create_local_file_data_stores, json_file_
from cic_ussd.http.requests import get_request_endpoint, get_request_method
from cic_ussd.http.responses import with_content_headers
from cic_ussd.menu.ussd_menu import UssdMenu
from cic_ussd.metadata.base import Metadata
from cic_ussd.metadata.signer import Signer
from cic_ussd.phone_number import process_phone_number, Support, E164Format
from cic_ussd.processor.ussd import handle_menu_operations
from cic_ussd.runnable.server_base import exportable_parser, logg

@@ -96,11 +97,7 @@ celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY
states = json_file_parser(filepath=config.get('MACHINE_STATES'))
transitions = json_file_parser(filepath=config.get('MACHINE_TRANSITIONS'))

chain_spec = ChainSpec(
    common_name=config.get('CIC_COMMON_NAME'),
    engine=config.get('CIC_ENGINE'),
    network_id=config.get('CIC_NETWORK_ID')
)
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))

Chain.spec = chain_spec
UssdStateMachine.states = states

@@ -113,7 +110,7 @@ default_token_data = query_default_token(chain_str)

# cache default token for re-usability
if default_token_data:
    cache_key = cache_data_key(chain_str.encode('utf-8'), ':cic.default_token_data')
    cache_key = cache_data_key(chain_str.encode('utf-8'), MetadataPointer.TOKEN_DEFAULT)
    cache_data(key=cache_key, data=json.dumps(default_token_data))
else:
    raise InitializationError(f'Default token data for: {chain_str} not found.')

@@ -3,8 +3,10 @@ import json
import logging
from datetime import timedelta

# third-party imports
# external imports
import celery
from cic_types.condiments import MetadataPointer


# local imports
from cic_ussd.account.balance import get_balances, calculate_available_balance

@@ -87,7 +89,7 @@ def balances_callback(result: list, param: str, status_code: int):

    balances = result[0]
    identifier = bytes.fromhex(param)
    key = cache_data_key(identifier, ':cic.balances')
    key = cache_data_key(identifier, MetadataPointer.BALANCES)
    cache_data(key, json.dumps(balances))

@@ -2,9 +2,10 @@
import json
import logging

# third-party imports
# external imports
import celery
import i18n
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.metadata import get_cached_preferred_language

@@ -49,7 +50,7 @@ def cache_statement(parsed_transaction: dict, querying_party: str):
    statement_transactions.append(parsed_transaction)
    data = json.dumps(statement_transactions)
    identifier = bytes.fromhex(querying_party)
    key = cache_data_key(identifier, ':cic.statement')
    key = cache_data_key(identifier, MetadataPointer.STATEMENT)
    cache_data(key, data)

apps/cic-ussd/config/chain.ini (new file, 2 lines)
@@ -0,0 +1,2 @@
[chain]
spec =

@@ -1,5 +1,2 @@
[cic]
engine = evm
common_name = bloxberg
network_id = 8996
meta_url = http://localhost:63380

apps/cic-ussd/config/test/chain.ini (new file, 2 lines)
@@ -0,0 +1,2 @@
[chain]
spec = 'evm:foo:1:bar'

@@ -1,5 +1,2 @@
[cic]
engine = evm
common_name = bloxberg
network_id = 8996
meta_url = http://test-meta.io
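
The chain configuration is consolidated here: the removed [cic] engine/common_name/network_id keys give way to a single [chain] spec string that the daemons above now parse with ChainSpec.from_chain_str. A hedged sketch of the two patterns, reusing only values visible in this diff (the test value 'evm:foo:1:bar'; the production spec value itself is left empty in chain.ini and is not guessed here):

```python
from chainlib.chain import ChainSpec

# Removed pattern: the spec was assembled from three separate config values.
old_spec = ChainSpec(common_name='bloxberg', engine='evm', network_id=8996)

# Added pattern: one chain-spec string, e.g. from chain.ini or a CHAIN_SPEC
# environment override, parsed in a single call.
new_spec = ChainSpec.from_chain_str('evm:foo:1:bar')
```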

@@ -1,6 +1,8 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
RUN apt-get install -y redis-server
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"

FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e

RUN apt-get install redis-server libffi-dev -y
# create secrets directory
RUN mkdir -vp pgp/keys

@@ -8,28 +10,27 @@ RUN mkdir -vp pgp/keys
RUN mkdir -vp cic-ussd
RUN mkdir -vp data

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple

RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
    pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY \
    --extra-index-url $EXTRA_INDEX_URL \
    cic-eth-aux-erc20-demurrage-token~=0.0.2a6
RUN pip install --index-url $PIP_INDEX_URL \
    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
    cic-eth-aux-erc20-demurrage-token~=0.0.2a7

COPY requirements.txt .

RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
    pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
COPY *requirements.txt ./
RUN pip install --index-url $PIP_INDEX_URL \
    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
    -r requirements.txt


COPY . .
RUN python setup.py install

COPY cic_ussd/db/ussd_menu.json data/

COPY docker/*.sh .
COPY docker/*.sh ./
RUN chmod +x /root/*.sh

# copy config and migration files to definitive file so they can be referenced in path definitions for running scripts

@@ -4,9 +4,9 @@ billiard==3.6.4.0
bcrypt==3.2.0
celery==4.4.7
cffi==1.14.6
cic-eth[services]~=0.12.4a11
cic-eth~=0.12.4a13
cic-notify~=0.4.0a10
cic-types~=0.2.0a3
cic-types~=0.2.0a6
confini>=0.3.6rc4,<0.5.0
phonenumbers==8.12.12
psycopg2==2.8.6

@@ -1,3 +1,4 @@
cic-eth[services]~=0.12.4a13
Faker==8.1.2
faker-e164==0.1.0
pytest==6.2.4
@ -4,8 +4,7 @@ import time
|
||||
|
||||
# external imports
|
||||
import pytest
|
||||
import requests_mock
|
||||
from chainlib.hash import strip_0x
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.statement import (filter_statement_transactions,
|
||||
@ -48,7 +47,7 @@ def test_generate(activated_account,
|
||||
generate(querying_party, None, sender_transaction)
|
||||
time.sleep(2)
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
key = cache_data_key(identifier, ':cic.statement')
|
||||
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
|
||||
statement = get_cached_data(key)
|
||||
statement = json.loads(statement)
|
||||
assert len(statement) == 1
|
||||
|
@ -3,7 +3,7 @@ import json
|
||||
import datetime
|
||||
|
||||
# external imports
|
||||
from chainlib.hash import strip_0x
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.balance import get_cached_available_balance
|
||||
@ -58,7 +58,7 @@ def test_menu_processor(activated_account,
|
||||
token_symbol=token_symbol)
|
||||
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
key = cache_data_key(identifier, ':cic.adjusted_balance')
|
||||
key = cache_data_key(identifier, MetadataPointer.BALANCES_ADJUSTED)
|
||||
adjusted_balance = 45931650.64654012
|
||||
cache_data(key, json.dumps(adjusted_balance))
|
||||
resp = response(activated_account, 'ussd.kenya.account_balances', name, init_database, generic_ussd_session)
|
||||
|
@ -7,6 +7,7 @@ import time
|
||||
import i18n
|
||||
import requests_mock
|
||||
from chainlib.hash import strip_0x
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.chain import Chain
|
||||
@ -45,7 +46,7 @@ def test_handle_menu(activated_account,
|
||||
ussd_menu = UssdMenu.find_by_name('initial_language_selection')
|
||||
assert menu_resp.get('name') == ussd_menu.get('name')
|
||||
identifier = bytes.fromhex(strip_0x(pending_account.blockchain_address))
|
||||
key = cache_data_key(identifier, ':cic.preferences')
|
||||
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
|
||||
cache_data(key, json.dumps(preferences))
|
||||
time.sleep(2)
|
||||
menu_resp = handle_menu(pending_account, init_database)
|
||||
|
@ -1,20 +1,18 @@
|
||||
# standard imports
|
||||
import json
|
||||
from decimal import Decimal
|
||||
|
||||
# external imports
|
||||
import celery
|
||||
import pytest
|
||||
import requests_mock
|
||||
from chainlib.hash import strip_0x
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.statement import generate, filter_statement_transactions
|
||||
from cic_ussd.account.statement import filter_statement_transactions
|
||||
from cic_ussd.account.transaction import transaction_actors
|
||||
from cic_ussd.cache import cache_data_key, get_cached_data
|
||||
from cic_ussd.db.models.account import Account
|
||||
from cic_ussd.error import AccountCreationDataNotFound
|
||||
from cic_ussd.metadata import PreferencesMetadata
|
||||
|
||||
|
||||
# test imports
|
||||
@ -89,7 +87,7 @@ def test_balances_callback(activated_account, balances, celery_session_worker):
|
||||
[balances, activated_account.blockchain_address, status_code])
|
||||
s_balances_callback.apply_async().get()
|
||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
||||
key = cache_data_key(identifier, ':cic.balances')
|
||||
key = cache_data_key(identifier, MetadataPointer.BALANCES)
|
||||
cached_balances = get_cached_data(key)
|
||||
cached_balances = json.loads(cached_balances)
|
||||
assert cached_balances == balances[0]
|
||||
|
@ -1,11 +1,11 @@
|
||||
# standard imports
|
||||
import json
|
||||
import os
|
||||
|
||||
# external imports
|
||||
import celery
|
||||
import requests_mock
|
||||
from chainlib.hash import strip_0x
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.cache import cache_data_key, get_cached_data
|
||||
@ -27,7 +27,7 @@ def test_query_person_metadata(activated_account,
|
||||
s_query_person_metadata = celery.signature(
|
||||
'cic_ussd.tasks.metadata.query_person_metadata', [activated_account.blockchain_address])
|
||||
s_query_person_metadata.apply().get()
|
||||
key = cache_data_key(identifier, ':cic.person')
|
||||
key = cache_data_key(identifier, MetadataPointer.PERSON)
|
||||
cached_person_metadata = get_cached_data(key)
|
||||
cached_person_metadata = json.loads(cached_person_metadata)
|
||||
assert cached_person_metadata == person_metadata
|
||||
@ -46,7 +46,7 @@ def test_query_preferences_metadata(activated_account,
|
||||
query_preferences_metadata = celery.signature(
|
||||
'cic_ussd.tasks.metadata.query_preferences_metadata', [activated_account.blockchain_address])
|
||||
query_preferences_metadata.apply().get()
|
||||
key = cache_data_key(identifier, ':cic.preferences')
|
||||
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
|
||||
cached_preferences_metadata = get_cached_data(key)
|
||||
cached_preferences_metadata = json.loads(cached_preferences_metadata)
|
||||
assert cached_preferences_metadata == preferences
|
||||
|
@ -4,6 +4,7 @@ import json
|
||||
# external imports
|
||||
import celery
|
||||
from chainlib.hash import strip_0x
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.transaction import transaction_actors
|
||||
@ -38,7 +39,7 @@ def test_cache_statement(activated_account,
|
||||
transaction_result):
|
||||
recipient_transaction, sender_transaction = transaction_actors(transaction_result)
|
||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
||||
key = cache_data_key(identifier, ':cic.statement')
|
||||
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
|
||||
cached_statement = get_cached_data(key)
|
||||
assert cached_statement is None
|
||||
s_parse_transaction = celery.signature(
|
||||
|
@ -3,6 +3,7 @@ import hashlib
|
||||
import json
|
||||
|
||||
# external imports
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
|
||||
@ -12,7 +13,7 @@ from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
|
||||
|
||||
def test_cache_data(init_cache):
|
||||
identifier = 'some_key'.encode()
|
||||
key = cache_data_key(identifier, ':testing')
|
||||
key = cache_data_key(identifier, MetadataPointer.PERSON)
|
||||
assert get_cached_data(key) is None
|
||||
cache_data(key, json.dumps('some_value'))
|
||||
assert get_cached_data(key) is not None
|
||||
@ -20,10 +21,10 @@ def test_cache_data(init_cache):
|
||||
|
||||
def test_cache_data_key():
|
||||
identifier = 'some_key'.encode()
|
||||
key = cache_data_key(identifier, ':testing')
|
||||
key = cache_data_key(identifier, MetadataPointer.PERSON)
|
||||
hash_object = hashlib.new("sha256")
|
||||
hash_object.update(identifier)
|
||||
hash_object.update(':testing'.encode(encoding="utf-8"))
|
||||
hash_object.update(':cic.person'.encode(encoding="utf-8"))
|
||||
assert hash_object.digest().hex() == key
|
||||
|
||||
|
||||
|
12
apps/cic-ussd/tests/fixtures/account.py
vendored
@ -4,7 +4,7 @@ import random
|
||||
|
||||
# external accounts
|
||||
import pytest
|
||||
from chainlib.hash import strip_0x
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.chain import Chain
|
||||
@ -56,7 +56,7 @@ def cache_account_creation_data(init_cache, account_creation_data):
|
||||
def cache_balances(activated_account, balances, init_cache):
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
balances = json.dumps(balances[0])
|
||||
key = cache_data_key(identifier, ':cic.balances')
|
||||
key = cache_data_key(identifier, MetadataPointer.BALANCES)
|
||||
cache_data(key, balances)
|
||||
|
||||
|
||||
@ -64,7 +64,7 @@ def cache_balances(activated_account, balances, init_cache):
|
||||
def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
|
||||
chain_str = Chain.spec.__str__()
|
||||
data = json.dumps(default_token_data)
|
||||
key = cache_data_key(chain_str.encode('utf-8'), ':cic.default_token_data')
|
||||
key = cache_data_key(chain_str.encode('utf-8'), MetadataPointer.TOKEN_DEFAULT)
|
||||
cache_data(key, data)
|
||||
|
||||
|
||||
@ -72,7 +72,7 @@ def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
|
||||
def cache_person_metadata(activated_account, init_cache, person_metadata):
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
person = json.dumps(person_metadata)
|
||||
key = cache_data_key(identifier, ':cic.person')
|
||||
key = cache_data_key(identifier, MetadataPointer.PERSON)
|
||||
cache_data(key, person)
|
||||
|
||||
|
||||
@ -80,7 +80,7 @@ def cache_person_metadata(activated_account, init_cache, person_metadata):
|
||||
def cache_preferences(activated_account, init_cache, preferences):
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
preferences = json.dumps(preferences)
|
||||
key = cache_data_key(identifier, ':cic.preferences')
|
||||
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
|
||||
cache_data(key, preferences)
|
||||
|
||||
|
||||
@ -88,7 +88,7 @@ def cache_preferences(activated_account, init_cache, preferences):
|
||||
def cache_statement(activated_account, init_cache, statement):
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
statement = json.dumps(statement)
|
||||
key = cache_data_key(identifier, ':cic.statement')
|
||||
key = cache_data_key(identifier, MetadataPointer.STATEMENT)
|
||||
cache_data(key, statement)
|
||||
|
||||
|
||||
|
6
apps/cic-ussd/tests/fixtures/config.py
vendored
@ -41,11 +41,7 @@ def init_state_machine(load_config):
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def load_chain_spec(load_config):
|
||||
chain_spec = ChainSpec(
|
||||
common_name=load_config.get('CIC_COMMON_NAME'),
|
||||
engine=load_config.get('CIC_ENGINE'),
|
||||
network_id=load_config.get('CIC_NETWORK_ID')
|
||||
)
|
||||
chain_spec = ChainSpec.from_chain_str(load_config.get('CHAIN_SPEC'))
|
||||
Chain.spec = chain_spec
|
||||
|
||||
|
||||
|
26
apps/contract-migration/1_deploy_contract_root.sh
Normal file
@ -0,0 +1,26 @@
#!/bin/bash

. util.sh

set -a

. ${DEV_DATA_DIR}/env_reset

set -e

if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
fi

must_eth_rpc

# Deploy address declarator registry
>&2 echo -e "\033[;96mDeploy address declarator contract\033[;39m"
DEV_ADDRESS_DECLARATOR=`eth-address-declarator-deploy -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG $DEV_DECLARATOR_DESCRIPTION`

echo -e "\033[;96mWriting env_reset file\033[;39m"
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset

set +a
set +e
64
apps/contract-migration/2_deploy_contract_instance.sh
Normal file
@ -0,0 +1,64 @@
#!/bin/bash

. util.sh

set -a

. ${DEV_DATA_DIR}/env_reset

WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}

set -e

must_address "$DEV_ADDRESS_DECLARATOR" "address declarator"
must_eth_rpc

if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
fi

# Deploy contract registry contract
>&2 echo -e "\033[;96mDeploy contract registry contract\033[;39m"
CIC_REGISTRY_ADDRESS=`okota-contract-registry-deploy $fee_price_arg -i $CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry --identifier DefaultToken --address-declarator $DEV_ADDRESS_DECLARATOR -p $RPC_PROVIDER $DEV_DEBUG_FLAG -s -u -w`

>&2 echo -e "\033[;96mAdd contract registry record to itself\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier ContractRegistry $CIC_REGISTRY_ADDRESS`
add_pending_tx_hash $r

>&2 echo -e "\033[;96mAdd address declarator record to contract registry\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier AddressDeclarator $DEV_ADDRESS_DECLARATOR`
add_pending_tx_hash $r

# Deploy transfer authorization contract
>&2 echo -e "\033[;96mDeploy transfer authorization contract\033[;39m"
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy $gas_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG`

>&2 echo -e "\033[;96mAdd transfer authorization record to contract registry\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS`
add_pending_tx_hash $r

# Deploy token index contract
>&2 echo -e "\033[;96mDeploy token symbol index contract\033[;39m"
DEV_TOKEN_INDEX_ADDRESS=`okota-token-index-deploy -s -u $fee_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG --address-declarator $DEV_ADDRESS_DECLARATOR`

>&2 echo -e "\033[;96mAdd token symbol index record to contract registry\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS`
add_pending_tx_hash $r

#>&2 echo "add reserve token to token index"
#eth-token-index-add $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG -e $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS

echo -e "\033[;96mWriting env_reset file\033[;39m"
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset

set +a
set +e
136
apps/contract-migration/3_deploy_token.sh
Normal file
@ -0,0 +1,136 @@
#!/bin/bash

. util.sh

set -a

. ${DEV_DATA_DIR}/env_reset

WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}

set -e

if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
fi

have_default_token=1
token_feedback_display_string='token'

must_address "$DEV_ADDRESS_DECLARATOR" "address declarator"
must_address "$CIC_REGISTRY_ADDRESS" "registry"
must_eth_rpc

function _deploy_token_defaults {
if [ -z "$TOKEN_SYMBOL" ]; then
>&2 echo -e "\033[;33mtoken symbol not set, setting defaults for type $TOKEN_TYPE\033[;39m"
TOKEN_SYMBOL=$1
TOKEN_NAME=$2
elif [ -z "$TOKEN_NAME" ]; then
>&2 echo -e "\033[;33mtoken name not set, setting same as symbol for type $TOKEN_TYPE\033[;39m"
TOKEN_NAME=$TOKEN_SYMBOL
fi
TOKEN_DECIMALS=${TOKEN_DECIMALS:-6}

default_token_registered=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw DefaultToken`
if [ $default_token_registered == '0000000000000000000000000000000000000000' ]; then
>&2 echo -e "\033[;33mFound no existing default token in token registry"
have_default_token=''
token_feedback_display_string='default token'
fi
>&2 echo -e "\033[;96mdeploying $token_feedback_display_string ..."
>&2 echo -e "Type: $TOKEN_TYPE"
>&2 echo -e "Name: $TOKEN_NAME"
>&2 echo -e "Symbol: $TOKEN_SYMBOL"
>&2 echo -e "Decimals: $TOKEN_DECIMALS\033[;39m"
}

function deploy_token_giftable_erc20_token() {
_deploy_token_defaults "GFT" "Giftable Token"
TOKEN_ADDRESS=`giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals $TOKEN_DECIMALS $DEV_DEBUG_FLAG`
}

function deploy_token_erc20_demurrage_token() {
_deploy_token_defaults "DET" "Demurrage Token"
TOKEN_ADDRESS=`erc20-demurrage-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL $DEV_DEBUG_FLAG -ww -s`
}

function deploy_accounts_index() {
# Deploy accounts index contract
>&2 echo -e "\033[;96mDeploy accounts index contract for token $TOKEN_SYMBOL\033[;39m"
DEV_ACCOUNTS_INDEX_ADDRESS=`okota-accounts-index-deploy $gas_price_arg -u -s -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --address-declarator $DEV_ADDRESS_DECLARATOR --token-address $1`

if [ -z "$have_default_token" ]; then
>&2 echo -e "\033[;96mAdd accounts index record for default token to contract registry\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier AccountRegistry $DEV_ACCOUNTS_INDEX_ADDRESS`
add_pending_tx_hash $r
fi
}

function deploy_minter_faucet() {
FAUCET_AMOUNT=${FAUCET_AMOUNT:-0}

# Token faucet contract
>&2 echo -e "\033[;96mDeploy token faucet contract for token $TOKEN_SYMBOL\033[;39m"
accounts_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw AccountRegistry`
faucet_address=`sarafu-faucet-deploy $fee_price_arg -s -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG --account-index-address $accounts_index_address $1`

>&2 echo -e "\033[;96mSet token faucet amount to $FAUCET_AMOUNT\033[;39m"
r=`sarafu-faucet-set $fee_price_arg -s -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $faucet_address $DEV_DEBUG_FLAG -s --fee-limit 100000 $FAUCET_AMOUNT`
add_pending_tx_hash $r

if [ -z $have_default_token ]; then
>&2 echo -e "\033[;96mRegister faucet in registry\033[;39m"
r=`eth-contract-registry-set -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier Faucet $faucet_address`
add_pending_tx_hash $r
fi

>&2 echo -e "\033[;96mSet faucet as token minter\033[;39m"
r=`giftable-token-minter -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $TOKEN_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG $faucet_address`
add_pending_tx_hash $r
}

TOKEN_TYPE=${TOKEN_TYPE:-giftable_erc20_token}
deploy_token_${TOKEN_TYPE}

if [ -z "$have_default_token" ]; then
>&2 echo -e "\033[;96mAdd default token to contract registry\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier DefaultToken $TOKEN_ADDRESS`
add_pending_tx_hash $r
fi

>&2 echo -e "\033[;96mAdd token symbol $TOKEN_SYMBOL to token address $TOKEN_ADDRESS mapping to token index\033[;39m"
token_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw TokenRegistry`
r=`eth-token-index-add $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG -e $token_index_address $TOKEN_ADDRESS`
add_pending_tx_hash $r

TOKEN_MINT_AMOUNT=${TOKEN_MINT_AMOUNT:-${DEV_TOKEN_MINT_AMOUNT}}
>&2 echo -e "\033[;96mMinting $TOKEN_MINT_AMOUNT tokens\033[;39m"
r=`giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC -u $DEV_DEBUG_FLAG -s -w -e $TOKEN_ADDRESS "$DEV_TOKEN_MINT_AMOUNT"`
add_pending_tx_hash $r

# Create accounts index for default token
deploy_accounts_index $TOKEN_ADDRESS

# Connect a minter component if defined
TOKEN_MINTER_MODE=${TOKEN_MINTER_MODE:-"faucet"}
if [ -z "$TOKEN_MINTER_MODE" ]; then
>&2 echo -e "\033[;33mNo token minter mode set.\033[;39m"
else
deploy_minter_${TOKEN_MINTER_MODE} $TOKEN_ADDRESS
fi

>&2 echo -e "\033[;96mWriting env_reset file\033[;39m"
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset

set +e
set +a
67
apps/contract-migration/4_init_custodial.sh
Normal file
@ -0,0 +1,67 @@
#!/bin/bash

. util.sh

set -a

. ${DEV_DATA_DIR}/env_reset

WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}

set -e

if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
fi

must_address "$CIC_REGISTRY_ADDRESS" "registry"
must_eth_rpc

# get required addresses from registries
token_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw TokenRegistry`
accounts_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw AccountRegistry`
reserve_address=`eth-token-index-list -i $CHAIN_SPEC -u -p $RPC_PROVIDER -e $token_index_address $DEV_DEBUG_FLAG --raw $CIC_DEFAULT_TOKEN_SYMBOL`

REDIS_HOST_CALLBACK=${REDIS_HOST_CALLBACK:-$REDIS_HOST}
REDIS_PORT_CALLBACK=${REDIS_PORT_CALLBACK:-$REDIS_PORT}
>&2 echo -e "\033[;96mcreate account for gas gifter\033[;39m"
gas_gifter=`cic-eth-create --redis-timeout 120 $DEV_DEBUG_FLAG --redis-host-callback $REDIS_HOST_CALLBACK --redis-port-callback $REDIS_PORT_CALLBACK --no-register`
cic-eth-tag -i $CHAIN_SPEC GAS_GIFTER $gas_gifter

>&2 echo -e "\033[;96mcreate account for accounts index writer\033[;39m"
accounts_index_writer=`cic-eth-create --redis-timeout 120 $DEV_DEBUG_FLAG --redis-host-callback $REDIS_HOST_CALLBACK --redis-port-callback $REDIS_PORT_CALLBACK --no-register`
cic-eth-tag -i $CHAIN_SPEC ACCOUNT_REGISTRY_WRITER $accounts_index_writer

# Assign system writer for accounts index
>&2 echo -e "\033[;96mEnable accounts index writer $accounts_index_writer to write to accounts index contract at $accounts_index_address\033[;39m"
r=`eth-accounts-index-writer -s -w -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $accounts_index_address $DEV_DEBUG_FLAG $accounts_index_writer`
add_pending_tx_hash $r

# Transfer gas to custodial gas provider address
>&2 echo -e "\033[;96mGift gas to gas gifter $gas_gifter\033[;39m"
echo "eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG -a $gas_gifter $DEV_GAS_AMOUNT"
r=`eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG -a $gas_gifter $DEV_GAS_AMOUNT`
add_pending_tx_hash $r

>&2 echo -e "\033[;96mgift gas to accounts index owner $accounts_index_writer\033[;39m"
# for now we are using the same key for both
DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
r=`eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG -a $accounts_index_writer $DEV_GAS_AMOUNT`
add_pending_tx_hash $r

# Remove the SEND (8), QUEUE (16) and INIT (2) locks (or'ed), set by default at migration
cic-eth-ctl -vv -i $CHAIN_SPEC unlock INIT
cic-eth-ctl -vv -i $CHAIN_SPEC unlock SEND
cic-eth-ctl -vv -i $CHAIN_SPEC unlock QUEUE

>&2 echo -e "\033[;96mWriting env_reset file\033[;39m"
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset

set +e
set +a
27
apps/contract-migration/5_data_seeding.sh
Normal file
@ -0,0 +1,27 @@
#!/bin/bash

. util.sh

set -a

. ${DEV_DATA_DIR}/env_reset

WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}

set -e

if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
fi

must_address "$CIC_REGISTRY_ADDRESS" "registry"
must_eth_rpc

accounts_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw AccountRegistry`

>&2 echo -e "\033[;96mEnable default wallet $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER to write to accounts index contract at $accounts_index_address\033[;39m"
r=`eth-accounts-index-writer -s -w -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $accounts_index_address $DEV_DEBUG_FLAG $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER`
add_pending_tx_hash $r
@ -1,53 +1,44 @@
# Contract Migration
# CIC-stack system bootstrap scripts

Common docker artifacts and bootstrap scripts

## How this repo works

This repo builds contracts and deploys them to a chain
## 1. Deploy global contracts.

First, bring up an eth evm provider
```
docker-compose up eth
```
Global contracts are contracts that may or may not be used to contribute to a data store intended for consumption across instances.

Now build this repo's image and run it against the 'eth' service (ganache, for example). You will need to bind to the docker-compose network (cic-network) and mount the special contract output folder that dependent services use to get deployed contract addresses.
In the current version of the scripts, the only contract deployed is the `AddressDeclarator`. Also, in the current version, the `AddressDeclarator` is required as a storage backend for some of the instance contracts.

Here is how to do that in one shot:
```
docker build -t registry.gitlab.com/grassrootseconomics/cic-docker-internal -f docker/ . && docker run --env ETH_PROVIDER=http://eth:8545 --net cic-network -v cic-docker-internal_contract-config:/tmp/cic/config --rm -it registry.gitlab.com/grassrootseconomics/cic-docker-internal reset.sh
```

Stop the containers and bring down the services with
```
docker-compose down
```
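Once this runlevel has completed, the declarator address is exported to the shared `env_reset` file in the data directory. A minimal sketch of reading it back out, assuming `DEV_DATA_DIR` points at the same data directory the bootstrap run used:

```
# print the address declarator deployed by 1_deploy_contract_root.sh
. ${DEV_DATA_DIR}/env_reset
echo $DEV_ADDRESS_DECLARATOR
```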
## 2. Deploy instance contracts.

If you want a fresh start to the dev environment then bring down the services and delete their associated volumes with
Instance contracts are contracts whose contents are limited to the context of a single custodial engine system.

```
docker-compose down -v
```
This includes a registry of contracts used by the engine, as well as registry contracts for user accounts and tokens.

A goal is to go through all of these containers and create a default non-root user a la:
https://vsupalov.com/docker-shared-permissions/
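The step scripts look registry entries up with `eth-contract-registry-list`; a sketch of querying the instance registry after this runlevel, with flags copied from those scripts and values taken from a sourced `env_reset`:

```
. ${DEV_DATA_DIR}/env_reset
eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS --raw TokenRegistry
eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS --raw AccountRegistry
```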
## Tips and Tricks
## 3. Deploy token.

Sometimes you just want to hold a container open in docker compose so you can exec into it and poke around. Replace "command" with
Deploys a CIC token, adding it to the token registry.

```
command:
  - /bin/sh
  - -c
  - |
    tail -f /dev/null
```
then
The first token deployed becomes the default token of the instance.

```
docker exec -it [IMAGE_NAME] sh
```
In the current version of the scripts, two token types may be deployed; [`giftable_erc20_token`](https://gitlab.com/cicnet/eth-erc20) and [`erc20_demurrage_token`](https://gitlab.com/cicnet/erc20-demurrage-token).

---
This step may be run multiple times, as long as the token symbol is different from all previously deployed tokens.
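For example, a second, demurrage-type token could be deployed by overriding the token variables before re-running this step. A sketch; the variable names are the ones consumed by `3_deploy_token.sh`, while the symbol and name values here are only placeholders:

```
export TOKEN_TYPE=erc20_demurrage_token
export TOKEN_SYMBOL=DET2
export TOKEN_NAME="Second Demurrage Token"
export TOKEN_DECIMALS=6
```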
## 4. Initialize custodial engine.

Adds system accounts to the custodial engine, and unlocks the initialization seal. After this step, the custodial system is ready to use.
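The seal is lifted by clearing the locks that the custodial migration sets by default; `4_init_custodial.sh` does this with:

```
cic-eth-ctl -vv -i $CHAIN_SPEC unlock INIT
cic-eth-ctl -vv -i $CHAIN_SPEC unlock SEND
cic-eth-ctl -vv -i $CHAIN_SPEC unlock QUEUE
```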
## Services dependency graph

1. evm
2. bootstrap runlevel 1
3. bootstrap runlevel 2
4. bootstrap runlevel 3
5. redis
6. postgres
7. cic-eth-tasker
8. bootstrap runlevel 4
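The runlevels above are selected through the `RUN_MASK` bitmask consumed by the entrypoint loop added in this merge request, one bit per step script. A sketch; the bit values follow from that loop, while how the variable is injected (e.g. via docker-compose environment) is an assumption:

```
# 1 = global contracts, 2 = instance contracts, 4 = token,
# 8 = custodial engine init, 16 = development data seeding
RUN_MASK=3   # runlevels 1 and 2 only
RUN_MASK=31  # all five steps
```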
53
apps/contract-migration/README_bloxberg.md
Normal file
@ -0,0 +1,53 @@
# Contract Migration

Common docker artifacts and bootstrap scripts

## How this repo works

This repo builds contracts and deploys them to a chain

First, bring up an eth evm provider
```
docker-compose up eth
```

Now build this repo's image and run it against the 'eth' service (ganache, for example). You will need to bind to the docker-compose network (cic-network) and mount the special contract output folder that dependent services use to get deployed contract addresses.

Here is how to do that in one shot:
```
docker build -t registry.gitlab.com/grassrootseconomics/cic-docker-internal -f docker/ . && docker run --env ETH_PROVIDER=http://eth:8545 --net cic-network -v cic-docker-internal_contract-config:/tmp/cic/config --rm -it registry.gitlab.com/grassrootseconomics/cic-docker-internal reset.sh
```

Stop the containers and bring down the services with
```
docker-compose down
```

If you want a fresh start to the dev environment then bring down the services and delete their associated volumes with

```
docker-compose down -v
```

A goal is to go through all of these containers and create a default non-root user a la:
https://vsupalov.com/docker-shared-permissions/

## Tips and Tricks

Sometimes you just want to hold a container open in docker compose so you can exec into it and poke around. Replace "command" with

```
command:
  - /bin/sh
  - -c
  - |
    tail -f /dev/null
```
then

```
docker exec -it [IMAGE_NAME] sh
```

---
@ -8,11 +8,16 @@ else
mkdir -p $DEV_DATA_DIR
fi

# By default configuration values generated from previous runs will be used in subsequent invocations
# Setting the config reset
if [ -z $DEV_CONFIG_RESET ]; then
if [ -f ${DEV_DATA_DIR}/env_reset ]; then
>&2 echo "importing existing configuration values from ${DEV_DATA_DIR}/env_reset"
if [ -f $DEV_DATA_DIR/env_reset ]; then
>&2 echo -e "\033[;96mimporting existing configuration values from ${DEV_DATA_DIR}/env_reset\033[;39m"
. ${DEV_DATA_DIR}/env_reset
fi
else
>&2 echo -e "\033[;33mGenerating scratch configuration\033[;39m"
confini-dump --schema-dir ./config --prefix export > ${DEV_DATA_DIR}/env_reset
fi

# Handle wallet
@ -21,7 +26,8 @@ if [ ! -f $WALLET_KEY_FILE ]; then
>&2 echo "wallet path '$WALLET_KEY_FILE' does not point to a file"
exit 1
fi
export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-checksum $(cat $WALLET_KEY_FILE | jq -r .address)`
#export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-checksum $(cat $WALLET_KEY_FILE | jq -r .address)`
export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-keyfile -z -d $WALLET_KEY_FILE`

# Wallet dependent variable defaults
export DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
@ -31,16 +37,7 @@ export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL
export TOKEN_SINK_ADDRESS=${TOKEN_SINK_ADDRESS:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}

# Legacy variable defaults

# Migration variable processing
confini-dump --schema-dir ./config --prefix export > ${DEV_DATA_DIR}/env_reset
echo "export CIC_TRUST_ADDRESS=$CIC_TRUST_ADDRESS
export CIC_DEFAULT_TOKEN_SYMBOL=$CIC_DEFAULT_TOKEN_SYMBOL
export WALLET_KEY_FILE=$WALLET_KEY_FILE
" >> ${DEV_DATA_DIR}/env_reset

cat ${DEV_DATA_DIR}/env_reset
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset

set +a
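The hunk above makes the cached environment opt-out: with `DEV_CONFIG_RESET` unset, values from an earlier run's `env_reset` are re-imported; setting it regenerates a scratch configuration from the schema. A usage sketch; the variable name comes from the hunk, while the invocation context is an assumption:

```
# discard state from earlier runs and rebuild env_reset from the config schema
export DEV_CONFIG_RESET=1
```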
@ -1,13 +1,26 @@
[dev]
eth_account_contract_deployer =
eth_account_reserve_minter =
eth_account_accounts_index_writer =
reserve_amount = 10000000000000000000000000000000000
faucet_amount = 0
token_mint_amount = 10000000000000000000000000000000000
gas_amount = 100000000000000000000000
token_amount = 100000000000000000000000
eth_gas_price =
data_dir =
pip_extra_index_url =
eth_provider_host =
eth_provider_port =
address_declarator =
declarator_description = 0x546869732069732074686520434943206e6574776f726b000000000000000000

[chain]
spec =

[rpc]
provider =

[celery]
broker_url =
result_url =

[redis]
host =
port =

[cic]
registry_address =
trust_address =
@ -1,5 +1,6 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e
ARG DEV_DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"

FROM $DEV_DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e

WORKDIR /root

@ -9,8 +10,6 @@ RUN echo 'deb-src http://ppa.launchpad.net/ethereum/ethereum/ubuntu bionic main'
RUN cat /etc/apt/sources.list.d/ethereum.list
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 2A518C819BE37D2C2031944D1C52189C923F6CA9

#RUN apt-get install solc

RUN mkdir -vp /usr/local/etc/cic

ENV CONFINI_DIR /usr/local/etc/cic/
@ -18,29 +17,20 @@ ENV CONFINI_DIR /usr/local/etc/cic/

COPY config_template/ /usr/local/etc/cic/
COPY requirements.txt .
COPY override_requirements.txt .

RUN apt-get install libffi-dev

ARG pip_index_url=https://pypi.org/simple
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG EXTRA_PIP_ARGS=""
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
ARG PIP_INDEX_URL="https://pypi.org/simple"
ARG pip_trusted_host=pypi.org
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
	pip install --index-url https://pypi.org/simple \
RUN pip install --index-url $PIP_INDEX_URL \
	--pre \
	--force-reinstall \
	--trusted-host $pip_trusted_host \
	--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
	--no-cache \
	--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
	-r requirements.txt

RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
	pip install --index-url https://pypi.org/simple \
	--force-reinstall \
	--pre \
	--trusted-host $pip_trusted_host \
	--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
	-r override_requirements.txt

COPY . .
RUN chmod +x *.sh
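With this change the pip source selection is driven entirely by build arguments (`PIP_INDEX_URL`, `EXTRA_PIP_INDEX_URL`, `EXTRA_PIP_ARGS`). A hedged build sketch; the argument names and default registry URL come from the Dockerfile above, while the image tag and Dockerfile path are assumptions:

```
docker build \
  --build-arg EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433 \
  --build-arg EXTRA_PIP_ARGS="--pre" \
  -t contract-migration -f docker/Dockerfile .
```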
@ -1,62 +0,0 @@
|
||||
SYNCER_LOOP_INTERVAL
|
||||
SSL_ENABLE_CLIENT
|
||||
SSL_CERT_FILE
|
||||
SSL_KEY_FILE
|
||||
SSL_PASSWORD
|
||||
SSL_CA_FILE
|
||||
BANCOR_DIR
|
||||
REDIS_HOST
|
||||
REDIS_PORT
|
||||
REDIS_DB
|
||||
PGP_EXPORTS_DIR
|
||||
PGP_PRIVATEKEY_FILE
|
||||
PGP_PASSPHRASE
|
||||
DATABASE_USER
|
||||
DATABASE_PASSWORD
|
||||
DATABASE_NAME
|
||||
DATABASE_HOST
|
||||
DATABASE_PORT
|
||||
DATABASE_ENGINE
|
||||
DATABASE_DRIVER
|
||||
DATABASE_DEBUG
|
||||
TASKS_AFRICASTALKING
|
||||
TASKS_SMS_DB
|
||||
TASKS_LOG
|
||||
TASKS_TRACE_QUEUE_STATUS
|
||||
TASKS_TRANSFER_CALLBACKS
|
||||
DEV_MNEMONIC
|
||||
DEV_ETH_RESERVE_ADDRESS
|
||||
DEV_ETH_ACCOUNTS_INDEX_ADDRESS
|
||||
DEV_ETH_RESERVE_AMOUNT
|
||||
DEV_ETH_ACCOUNT_BANCOR_DEPLOYER
|
||||
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
|
||||
DEV_ETH_ACCOUNT_GAS_PROVIDER
|
||||
DEV_ETH_ACCOUNT_RESERVE_OWNER
|
||||
DEV_ETH_ACCOUNT_RESERVE_MINTER
|
||||
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_OWNER
|
||||
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER
|
||||
DEV_ETH_ACCOUNT_SARAFU_OWNER
|
||||
DEV_ETH_ACCOUNT_SARAFU_GIFTER
|
||||
DEV_ETH_ACCOUNT_APPROVAL_ESCROW_OWNER
|
||||
DEV_ETH_ACCOUNT_SINGLE_SHOT_FAUCET_OWNER
|
||||
DEV_ETH_SARAFU_TOKEN_NAME
|
||||
DEV_ETH_SARAFU_TOKEN_SYMBOL
|
||||
DEV_ETH_SARAFU_TOKEN_DECIMALS
|
||||
DEV_ETH_SARAFU_TOKEN_ADDRESS
|
||||
DEV_PGP_PUBLICKEYS_ACTIVE_FILE
|
||||
DEV_PGP_PUBLICKEYS_TRUSTED_FILE
|
||||
DEV_PGP_PUBLICKEYS_ENCRYPT_FILE
|
||||
CIC_REGISTRY_ADDRESS
|
||||
CIC_APPROVAL_ESCROW_ADDRESS
|
||||
CIC_TOKEN_INDEX_ADDRESS
|
||||
CIC_ACCOUNTS_INDEX_ADDRESS
|
||||
CIC_DECLARATOR_ADDRESS
|
||||
CIC_CHAIN_SPEC
|
||||
ETH_PROVIDER
|
||||
ETH_ABI_DIR
|
||||
SIGNER_SOCKET_PATH
|
||||
SIGNER_SECRET
|
||||
SIGNER_PROVIDER
|
||||
CELERY_BROKER_URL
|
||||
CELERY_RESULT_URL
|
||||
META_PROVIDER
|
@ -1,4 +0,0 @@
|
||||
#eth-contract-registry==0.6.3a2
|
||||
#erc20-demurrage-token==0.0.2a3
|
||||
#eth-address-index==0.1.1a12
|
||||
|
@ -1,15 +1,14 @@
cic-eth[tools]==0.12.4a11
chainlib-eth>=0.0.9rc4,<0.1.0
chainlib==0.0.9rc1,<0.1.0
cic-eth[tools]==0.12.4a13
chainlib-eth>=0.0.10a5,<0.1.0
chainlib==0.0.10a3,<0.1.0
eth-erc20>=0.1.2a3,<0.2.0
erc20-demurrage-token>=0.0.5a2,<0.1.0
#eth-accounts-index>=0.1.2a2,<0.2.0
eth-address-index>=0.2.4a1,<0.3.0
cic-eth-registry>=0.6.1a5,<0.7.0
cic-eth-registry>=0.6.1a6,<0.7.0
erc20-transfer-authorization>=0.3.5a2,<0.4.0
erc20-faucet>=0.3.2a2,<0.4.0
sarafu-faucet>=0.0.7a2,<0.1.0
confini>=0.4.2rc3,<1.0.0
crypto-dev-signer>=0.4.15rc2,<=0.4.15
eth-token-index>=0.2.4a1,<=0.3.0
okota>=0.2.4a5,<0.3.0
okota>=0.2.4a15,<0.3.0
@ -1,127 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -a
|
||||
|
||||
. ${DEV_DATA_DIR}/env_reset
|
||||
|
||||
WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
|
||||
|
||||
set -e
|
||||
|
||||
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
|
||||
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
|
||||
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
|
||||
fi
|
||||
|
||||
# Wait for the backend to be up, if we know where it is.
|
||||
if [ -z "${RPC_PROVIDER}" ]; then
|
||||
echo "\$RPC_PROVIDER not set!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
unset CONFINI_DIR
|
||||
|
||||
if [ ! -z "$DEV_USE_DOCKER_WAIT_SCRIPT" ]; then
|
||||
IFS=: read -a p <<< "$RPC_PROVIDER"
|
||||
read -i "/" rpc_provider_port <<< "${p[2]}"
|
||||
rpc_provider_host=${p[1]:2}
|
||||
echo "waiting for provider host $rpc_provider_host port $rpc_provider_port..."
|
||||
./wait-for-it.sh "$rpc_provider_host:$rpc_provider_port" -t $WAIT_FOR_TIMEOUT
|
||||
fi
|
||||
|
||||
if [ "$TOKEN_TYPE" == "giftable_erc20_token" ]; then
|
||||
if [ -z "$TOKEN_SYMBOL" ]; then
|
||||
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
|
||||
TOKEN_SYMBOL="GFT"
|
||||
TOKEN_NAME="Giftable Token"
|
||||
elif [ -z "$TOKEN_NAME" ]; then
|
||||
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
|
||||
TOKEN_NAME=$TOKEN_SYMBOL
|
||||
fi
|
||||
>&2 echo deploying default token $TOKEN_TYPE
|
||||
echo giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv
|
||||
DEV_RESERVE_ADDRESS=`giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv`
|
||||
elif [ "$TOKEN_TYPE" == "erc20_demurrage_token" ]; then
|
||||
if [ -z "$TOKEN_SYMBOL" ]; then
|
||||
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
|
||||
TOKEN_SYMBOL="DET"
|
||||
TOKEN_NAME="Demurrage Token"
|
||||
elif [ -z "$TOKEN_NAME" ]; then
|
||||
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
|
||||
TOKEN_NAME=$TOKEN_SYMBOL
|
||||
fi
|
||||
>&2 echo deploying token $TOKEN_TYPE
|
||||
if [ -z $TOKEN_SINK_ADDRESS ]; then
|
||||
if [ ! -z $TOKEN_REDISTRIBUTION_PERIOD ]; then
|
||||
>&2 echo -e "\033[;93mtoken sink address not set, so redistribution will be BURNED\033[;39m"
|
||||
fi
|
||||
fi
|
||||
DEV_RESERVE_ADDRESS=`erc20-demurrage-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL -vv -ww -s`
|
||||
else
|
||||
>&2 echo unknown token type $TOKEN_TYPE
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT"
|
||||
giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -u -vv -s -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT
|
||||
|
||||
# Deploy address declarator registry
|
||||
>&2 echo "deploy address declarator contract"
|
||||
declarator_description=0x546869732069732074686520434943206e6574776f726b000000000000000000
|
||||
DEV_DECLARATOR_ADDRESS=`eth-address-declarator-deploy -s -u -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv $declarator_description`
|
||||
|
||||
>&2 echo "deploy contract registry contract"
|
||||
#CIC_REGISTRY_ADDRESS=`eth-contract-registry-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry -p $RPC_PROVIDER -vv -s -u -w`
|
||||
CIC_REGISTRY_ADDRESS=`okota-contract-registry-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry --address-declarator $DEV_DECLARATOR_ADDRESS -p $RPC_PROVIDER -vv -s -u -w`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier ContractRegistry $CIC_REGISTRY_ADDRESS
|
||||
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier AddressDeclarator $DEV_DECLARATOR_ADDRESS
|
||||
|
||||
>&2 echo "deploy account index contract"
|
||||
#DEV_ACCOUNT_INDEX_ADDRESS=`eth-accounts-index-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -y $WALLET_KEY_FILE -vv -s -u -w`
|
||||
DEV_ACCOUNT_INDEX_ADDRESS=`okota-accounts-index-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -y $WALLET_KEY_FILE -vv -s -u -w --address-declarator $DEV_DECLARATOR_ADDRESS --token-address $DEV_RESERVE_ADDRESS`
|
||||
#>&2 echo "add deployer address as account index writer"
|
||||
#eth-accounts-index-writer $fee_price_arg -s -u -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -e $DEV_ACCOUNT_INDEX_ADDRESS -ww -vv $debug $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier AccountRegistry $DEV_ACCOUNT_INDEX_ADDRESS
|
||||
|
||||
# Deploy transfer authorization contact
|
||||
>&2 echo "deploy transfer auth contract"
|
||||
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy $gas_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS
|
||||
|
||||
# Deploy token index contract
|
||||
>&2 echo "deploy token index contract"
|
||||
#DEV_TOKEN_INDEX_ADDRESS=`eth-token-index-deploy -s -u $fee_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv`
|
||||
DEV_TOKEN_INDEX_ADDRESS=`okota-token-index-deploy -s -u $fee_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv --address-declarator $DEV_DECLARATOR_ADDRESS`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS
|
||||
>&2 echo "add reserve token to token index"
|
||||
eth-token-index-add $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv -e $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS
|
||||
|
||||
# Sarafu faucet contract
|
||||
>&2 echo "deploy token faucet contract"
|
||||
DEV_FAUCET_ADDRESS=`sarafu-faucet-deploy $fee_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv --account-index-address $DEV_ACCOUNT_INDEX_ADDRESS $DEV_RESERVE_ADDRESS -s`
|
||||
|
||||
>&2 echo "set token faucet amount"
|
||||
sarafu-faucet-set $fee_price_arg -w -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -e $DEV_FAUCET_ADDRESS -vv -s --fee-limit 100000 $DEV_FAUCET_AMOUNT
|
||||
|
||||
>&2 echo "register faucet in registry"
|
||||
eth-contract-registry-set -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier Faucet $DEV_FAUCET_ADDRESS
|
||||
|
||||
>&2 echo "set faucet as token minter"
|
||||
giftable-token-minter -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $DEV_RESERVE_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv $DEV_FAUCET_ADDRESS
|
||||
|
||||
|
||||
#echo "export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL" >> ${DEV_DATA_DIR}/env_reset
|
||||
export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL
|
||||
|
||||
echo "Writing env_reset file ..."
|
||||
|
||||
echo "export CIC_REGISTRY_ADDRESS=$CIC_REGISTRY_ADDRESS
|
||||
export CIC_DEFAULT_TOKEN_SYMBOL=$CIC_DEFAULT_TOKEN_SYMBOL
|
||||
export TOKEN_NAME=$TOKEN_NAME
|
||||
" >> "${DEV_DATA_DIR}"/env_reset
|
||||
|
||||
set +a
|
||||
set +e
|
||||
|
||||
exec "$@"
|
@ -1,31 +1,54 @@
#! /bin/bash

>&2 echo -e "\033[;96mRUNNING\033[;39m configurations"
./config.sh
if [ $? -ne "0" ]; then
>&2 echo -e "\033[;31mFAILED\033[;39m configurations"
exit 1;
fi
>&2 echo -e "\033[;32mSUCCEEDED\033[;39m configurations"
. ./util.sh

if [[ $((RUN_MASK & 1)) -eq 1 ]]
then
>&2 echo -e "\033[;96mRUNNING\033[;39m RUN_MASK 1 - contract deployment"
./reset.sh
if [ $? -ne "0" ]; then
>&2 echo -e "\033[;31mFAILED\033[;39m RUN_MASK 1 - contract deployment"
exit 1;
fi
>&2 echo -e "\033[;32mSUCCEEDED\033[;39m RUN_MASK 1 - contract deployment"
set -a
DEV_DEBUG_FLAG=""
DEV_DEBUG_LEVEL=${DEV_DEBUG_LEVEL=0}
if [ $DEV_DEBUG_LEVEL -eq 1 ]; then
DEV_DEBUG_FLAG="-v"
elif [ $DEV_DEBUG_LEVEL -gt 1 ]; then
DEV_DEBUG_FLAG="-vv"
fi

if [[ $((RUN_MASK & 2)) -eq 2 ]]
then
>&2 echo -e "\033[;96mRUNNING\033[;39m RUN_MASK 2 - custodial service initialization"
./seed_cic_eth.sh
# disable override of config schema directory
unset CONFINI_DIR

set +a

LAST_BIT_POS=5
files=(deploy_contract_root deploy_contract_instance deploy_token init_custodial data_seeding)
description=("global contracts" "instance specific contracts" "token deployment" "initialize custodial engine" "data seeding for development")

>&2 echo -e "\033[;96mRUNNING configurations\033[;39m"
source ./config.sh
if [ $? -ne "0" ]; then
>&2 echo -e "\033[;31mFAILED\033[;39m RUN_MASK 2 - custodial service initialization"
>&2 echo -e "\033[;31mFAILED configurations\033[;39m"
exit 1;
fi
>&2 echo -e "\033[;32mSUCCEEDED\033[;39m RUN_MASK 2 - custodial service initialization"
>&2 echo -e "\033[;32mSUCCEEDED configurations\033[;39m"

>&2 echo -e "\033[;96mInitial configuration state\033[;39m"

confini-dump --schema-dir ./config

clear_pending_tx_hashes

bit=1
for ((i=0; i<$LAST_BIT_POS; i++)); do
runlevel="RUNLEVEL $bit"
if [[ $((RUN_MASK & $bit)) -eq ${bit} ]]; then
s="$runlevel - ${description[$i]}"
>&2 echo -e "\033[;96mRUNNING $s\033[;39m"
source $((i+1))_${files[$i]}.sh
if [ $? -ne "0" ]; then
>&2 echo -e "\033[;31mFAILED $s\033[;39m"
exit 1;
fi
>&2 echo -e "\033[;32mSUCCEEDED $s\033[;39m"
>&2 echo -e "\033[;96mConfiguration state after $runlevel execution\033[;39m"
confini-dump --schema-dir ./config
fi
bit=$((bit*2))
done
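The new entrypoint loop also derives the verbosity flag passed to every chain tool from `DEV_DEBUG_LEVEL` (0 gives no flag, 1 gives `-v`, anything above gives `-vv`). A sketch of a verbose full run; how the variables are injected, for example through the docker-compose environment, is an assumption:

```
export DEV_DEBUG_LEVEL=2   # step scripts receive -vv through $DEV_DEBUG_FLAG
export RUN_MASK=31         # run all five bootstrap runlevels
```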
@ -1,88 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# defaults
|
||||
source ${DEV_DATA_DIR}/env_reset
|
||||
cat ${DEV_DATA_DIR}/env_reset
|
||||
|
||||
# Debug flag
|
||||
debug='-vv'
|
||||
empty_config_dir=$CONFINI_DIR/empty
|
||||
|
||||
set -e
|
||||
set -a
|
||||
|
||||
unset CONFINI_DIR
|
||||
|
||||
# get required addresses from registries
|
||||
token_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw TokenRegistry`
|
||||
account_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw AccountRegistry`
|
||||
reserve_address=`eth-token-index-list -i $CHAIN_SPEC -u -p $RPC_PROVIDER -e $token_index_address -vv --raw $CIC_DEFAULT_TOKEN_SYMBOL`
|
||||
|
||||
>&2 echo "Token registry: $token_index_address"
|
||||
>&2 echo "Account registry: $account_index_address"
|
||||
>&2 echo "Reserve address: $reserve_address ($TOKEN_SYMBOL)"
|
||||
|
||||
>&2 echo "create account for gas gifter"
|
||||
old_gas_provider=$DEV_ETH_ACCOUNT_GAS_PROVIDER
|
||||
#DEV_ETH_ACCOUNT_GAS_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create --redis-timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
DEV_ETH_ACCOUNT_GAS_GIFTER=`cic-eth-create --redis-timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
cic-eth-tag -i $CHAIN_SPEC GAS_GIFTER $DEV_ETH_ACCOUNT_GAS_GIFTER
|
||||
|
||||
|
||||
>&2 echo "create account for sarafu gifter"
|
||||
DEV_ETH_ACCOUNT_SARAFU_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
cic-eth-tag -i $CHAIN_SPEC SARAFU_GIFTER $DEV_ETH_ACCOUNT_SARAFU_GIFTER
|
||||
|
||||
>&2 echo "create account for approval escrow owner"
|
||||
DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
cic-eth-tag -i $CHAIN_SPEC TRANSFER_AUTHORIZATION_OWNER $DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER
|
||||
|
||||
#>&2 echo "create account for faucet owner"
|
||||
#DEV_ETH_ACCOUNT_FAUCET_OWNER=`cic-eth-create $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
#echo DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_FAUCET_OWNER >> $env_out_file
|
||||
#cic-eth-tag FAUCET_GIFTER $DEV_ETH_ACCOUNT_FAUCET_OWNER
|
||||
|
||||
>&2 echo "create account for accounts index writer"
|
||||
DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
cic-eth-tag -i $CHAIN_SPEC ACCOUNT_REGISTRY_WRITER $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
|
||||
>&2 echo "add acccounts index writer account as writer on contract"
|
||||
#eth-accounts-index-writer -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $account_index_address -ww $debug $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
|
||||
|
||||
# Transfer gas to custodial gas provider adddress
|
||||
_CONFINI_DIR=$CONFINI_DIR
|
||||
unset CONFINI_DIR
|
||||
>&2 echo gift gas to gas gifter
|
||||
>&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_GAS_GIFTER $DEV_GAS_AMOUNT
|
||||
|
||||
>&2 echo gift gas to sarafu token owner
|
||||
>&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER $DEV_GAS_AMOUNT
|
||||
|
||||
>&2 echo gift gas to account index owner
|
||||
>&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER $DEV_GAS_AMOUNT
|
||||
|
||||
|
||||
# Send token to token creator
|
||||
>&2 echo "gift tokens to sarafu owner"
|
||||
echo "giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $DEV_TOKEN_AMOUNT"
|
||||
>&2 giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $DEV_TOKEN_AMOUNT
|
||||
|
||||
# Send token to token gifter
|
||||
>&2 echo "gift tokens to keystore address"
|
||||
>&2 giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER -w $debug $DEV_TOKEN_AMOUNT
|
||||
|
||||
>&2 echo "set sarafu token to reserve token (temporarily while bancor contracts are not connected)"
|
||||
export DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS
|
||||
|
||||
# Transfer tokens to gifter address
|
||||
>&2 echo "transfer tokens to token gifter address"
|
||||
>&2 erc20-transfer -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER --fee-limit 100000 -e $reserve_address -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${DEV_TOKEN_AMOUNT:0:-1}
|
||||
|
||||
# Remove the SEND (8), QUEUE (16) and INIT (2) locks (or'ed), set by default at migration
|
||||
cic-eth-ctl -vv -i $CHAIN_SPEC unlock INIT
|
||||
cic-eth-ctl -vv -i $CHAIN_SPEC unlock SEND
|
||||
cic-eth-ctl -vv -i $CHAIN_SPEC unlock QUEUE
|
||||
|
||||
#confini-dump --schema-module chainlib.eth.data.config --schema-module cic_eth.data.config --schema-dir ./config
|
||||
|
||||
set +a
|
||||
set +e
|
42
apps/contract-migration/util.sh
Normal file
@ -0,0 +1,42 @@
function must_address() {
if [[ ! "$1" =~ ^(0x)?[0-9a-fA-F]{40}$ ]]; then
>&2 echo -e "\033[;31mvalue '$1' for $2 is not an address\033[;39m"
exit 1
fi
}

function must_hash_256() {
if [[ ! "$1" =~ ^(0x)?[0-9a-fA-F]{64}$ ]]; then
>&2 echo -e "\033[;31mvalue '$1' for $2 is not a 256-bit digest\033[;39m"
exit 1
fi
}

function must_eth_rpc() {
if [ -z "${RPC_PROVIDER}" ]; then
echo "\$RPC_PROVIDER not set!"
exit 1
fi
# Wait for the backend to be up, if we know where it is.
if [ ! -z "$DEV_USE_DOCKER_WAIT_SCRIPT" ]; then
WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
IFS=: read -a p <<< "$RPC_PROVIDER"
read -i "/" rpc_provider_port <<< "${p[2]}"
rpc_provider_host=${p[1]:2}
echo "waiting for provider host $rpc_provider_host port $rpc_provider_port..."
./wait-for-it.sh "$rpc_provider_host:$rpc_provider_port" -t $WAIT_FOR_TIMEOUT
fi
}

function clear_pending_tx_hashes() {
truncate -s 0 $DEV_DATA_DIR/hashes
}

function add_pending_tx_hash() {
must_hash_256 $1
echo $1 >> $DEV_DATA_DIR/hashes
}
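These helpers are shared by all five step scripts, and the usage pattern is always the same: source the file, validate the required inputs, then record every returned transaction hash. A condensed sketch, with the registry call abbreviated from the scripts above:

```
. util.sh
must_address "$CIC_REGISTRY_ADDRESS" "registry"
must_eth_rpc
r=`eth-contract-registry-set -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER --identifier Faucet $faucet_address`
add_pending_tx_hash $r
```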
@ -136,13 +136,13 @@ First, make a note of the **block height** before running anything:
|
||||
|
||||
To import, run to _completion_:
|
||||
|
||||
`python eth/import_users.py -v -c config -p <eth_provider> -r <cic_registry_address> -y ../contract-migration/keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c <datadir>`
|
||||
`python eth/import_users.py -v -p <eth_provider> -r <cic_registry_address> -y ../contract-migration/keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c <datadir>`
|
||||
|
||||
After the script completes, keystore files for all generated accouts will be found in `<datadir>/keystore`, all with `foo` as password (would set it empty, but believe it or not some interfaces out there won't work unless you have one).
|
||||
|
||||
Then run:
|
||||
|
||||
`python eth/import_balance.py -v -c config -r <cic_registry_address> -p <eth_provider> --token-symbol <token_symbol> --offset <block_height_at_start> -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c <datadir>`
|
||||
`python eth/import_balance.py -v -r <cic_registry_address> -p <eth_provider> --token-symbol <token_symbol> --offset <block_height_at_start> -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c <datadir>`
|
||||
|
||||
|
||||
|
||||
@ -150,11 +150,11 @@ Then run:
|
||||
|
||||
Run in sequence, in first terminal:
|
||||
|
||||
`python cic_eth/import_balance.py -v -c config -p <eth_provider> -r <cic_registry_address> --token-symbol <token_symbol> -y ../contract-migration/keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c --head out`
|
||||
`python cic_eth/import_balance.py -v -p <eth_provider> -r <cic_registry_address> --token-symbol <token_symbol> -y ../contract-migration/keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c --head out`
|
||||
|
||||
In another terminal:
|
||||
|
||||
`python cic_eth/import_users.py -v -c config --redis-host-callback <redis_hostname_in_docker> out`
|
||||
`python cic_eth/import_users.py -v --redis-host-callback <redis_hostname_in_docker> out`
|
||||
|
||||
The `redis_hostname_in_docker` value is the hostname required to reach the redis server from within the docker cluster, and should be `redis` if you left the docker-compose unchanged. The `import_users` script will receive the address of each newly created custodial account on a redis subscription fed by a callback task in the `cic_eth` account creation task chain.
|
||||
|
||||
|
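(As a rough illustration of the callback flow described above, not the project's implementation: a minimal sketch of reading addresses published on a redis channel. Host, port, db and channel name are hypothetical placeholders; the import script generates a fresh channel per request.)

```python
# Sketch only: listen on a redis pubsub channel for newly created custodial
# account addresses, similar to what import_users.py does via the cic_eth
# redis callback task. Connection details and channel are placeholders.
import redis

r = redis.Redis(host='localhost', port=6379, db=0)
ps = r.pubsub()
ps.subscribe('example-callback-channel')

while True:
    msg = ps.get_message(timeout=10)
    if msg is None:
        break
    if msg.get('type') != 'message':
        continue  # skip subscribe confirmations
    print('new account address:', msg['data'])
```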
@ -47,12 +47,14 @@ from eth_token_index import TokenUniqueSymbolIndex
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
config_dir = './config'
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
root_dir = os.path.dirname(script_dir)
|
||||
base_config_dir = os.path.join(root_dir, 'config')
|
||||
|
||||
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||
argparser.add_argument('-c', type=str, help='config override directory')
|
||||
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
||||
argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
|
||||
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||
argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
|
||||
@ -71,22 +73,21 @@ if args.v == True:
|
||||
elif args.vv == True:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
|
||||
config_dir = os.path.join(args.c)
|
||||
os.makedirs(config_dir, 0o777, True)
|
||||
config = confini.Config(config_dir, args.env_prefix)
|
||||
# override args
|
||||
config = None
|
||||
if args.c != None:
|
||||
config = confini.Config(base_config_dir, os.environ.get('CONFINI_ENV_PREFIX'), override_config_dir=args.c)
|
||||
else:
|
||||
config = confini.Config(base_config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config.process()
|
||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||
args_override = {
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
'ETH_PROVIDER': getattr(args, 'p'),
|
||||
'CHAIN_SPEC': getattr(args, 'i'),
|
||||
'RPC_PROVIDER': getattr(args, 'p'),
|
||||
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
||||
'KEYSTORE_FILE_PATH': getattr(args, 'y'),
|
||||
'WALLET_KEY_FILE': getattr(args, 'y'),
|
||||
}
|
||||
config.dict_override(args_override, 'cli flag')
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||
|
||||
#app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
||||
|
||||
@ -99,7 +100,7 @@ if args.y != None:
|
||||
signer = EIP155Signer(keystore)
|
||||
|
||||
queue = args.q
|
||||
chain_str = config.get('CIC_CHAIN_SPEC')
|
||||
chain_str = config.get('CHAIN_SPEC')
|
||||
block_offset = 0
|
||||
if args.head:
|
||||
block_offset = -1
|
||||
@ -192,7 +193,7 @@ def progress_callback(block_number, tx_index):
|
||||
def main():
|
||||
global chain_str, block_offset, user_dir
|
||||
|
||||
conn = EthHTTPConnection(config.get('ETH_PROVIDER'))
|
||||
conn = EthHTTPConnection(config.get('RPC_PROVIDER'))
|
||||
gas_oracle = OverrideGasOracle(conn=conn, limit=8000000)
|
||||
nonce_oracle = RPCNonceOracle(signer_address, conn)
|
||||
|
||||
|
@ -10,7 +10,7 @@ import time
|
||||
import phonenumbers
|
||||
from glob import glob
|
||||
|
||||
# third-party imports
|
||||
# external imports
|
||||
import redis
|
||||
import confini
|
||||
import celery
|
||||
@ -23,15 +23,23 @@ from cic_types.models.person import Person
|
||||
from cic_eth.api.api_task import Api
|
||||
from chainlib.chain import ChainSpec
|
||||
from cic_types.processor import generate_metadata_pointer
|
||||
from cic_types import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from common.dirs import initialize_dirs
|
||||
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
default_config_dir = '/usr/local/etc/cic'
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
root_dir = os.path.dirname(script_dir)
|
||||
base_config_dir = os.path.join(root_dir, 'config')
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default=default_config_dir, help='config file')
|
||||
argparser.add_argument('-c', type=str, help='config override directory')
|
||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string')
|
||||
argparser.add_argument('-f', action='store_true', help='force clear previous state')
|
||||
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
|
||||
argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
|
||||
argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
|
||||
@ -52,16 +60,21 @@ if args.v:
|
||||
elif args.vv:
|
||||
logg.setLevel(logging.DEBUG)
|
||||
|
||||
config_dir = args.c
|
||||
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config = None
|
||||
if args.c != None:
|
||||
config = confini.Config(base_config_dir, os.environ.get('CONFINI_ENV_PREFIX'), override_config_dir=args.c)
|
||||
else:
|
||||
config = confini.Config(base_config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config.process()
|
||||
args_override = {
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
'CHAIN_SPEC': getattr(args, 'i'),
|
||||
'REDIS_HOST': getattr(args, 'redis_host'),
|
||||
'REDIS_PORT': getattr(args, 'redis_port'),
|
||||
'REDIS_DB': getattr(args, 'redis_db'),
|
||||
}
|
||||
config.dict_override(args_override, 'cli')
|
||||
config.add(args.user_dir, '_USERDIR', True)
|
||||
|
||||
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||
|
||||
redis_host = config.get('REDIS_HOST')
|
||||
@ -71,44 +84,25 @@ r = redis.Redis(redis_host, redis_port, redis_db)
|
||||
|
||||
ps = r.pubsub()
|
||||
|
||||
user_new_dir = os.path.join(args.user_dir, 'new')
|
||||
os.makedirs(user_new_dir)
|
||||
|
||||
meta_dir = os.path.join(args.user_dir, 'meta')
|
||||
os.makedirs(meta_dir)
|
||||
|
||||
custom_dir = os.path.join(args.user_dir, 'custom')
|
||||
os.makedirs(custom_dir)
|
||||
os.makedirs(os.path.join(custom_dir, 'new'))
|
||||
os.makedirs(os.path.join(custom_dir, 'meta'))
|
||||
|
||||
phone_dir = os.path.join(args.user_dir, 'phone')
|
||||
os.makedirs(os.path.join(phone_dir, 'meta'))
|
||||
|
||||
user_old_dir = os.path.join(args.user_dir, 'old')
|
||||
os.stat(user_old_dir)
|
||||
|
||||
txs_dir = os.path.join(args.user_dir, 'txs')
|
||||
os.makedirs(txs_dir)
|
||||
|
||||
user_dir = args.user_dir
|
||||
|
||||
old_chain_spec = ChainSpec.from_chain_str(args.old_chain_spec)
|
||||
old_chain_str = str(old_chain_spec)
|
||||
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
|
||||
chain_str = str(chain_spec)
|
||||
|
||||
batch_size = args.batch_size
|
||||
batch_delay = args.batch_delay
|
||||
|
||||
dirs = initialize_dirs(config.get('_USERDIR'), force_reset=args.f)
|
||||
|
||||
|
||||
def register_eth(i, u):
|
||||
redis_channel = str(uuid.uuid4())
|
||||
ps.subscribe(redis_channel)
|
||||
#ps.get_message()
|
||||
api = Api(
|
||||
config.get('CIC_CHAIN_SPEC'),
|
||||
config.get('CHAIN_SPEC'),
|
||||
queue=args.q,
|
||||
callback_param='{}:{}:{}:{}'.format(args.redis_host_callback, args.redis_port_callback, redis_db, redis_channel),
|
||||
callback_task='cic_eth.callbacks.redis.redis',
|
||||
@ -145,7 +139,7 @@ def register_eth(i, u):
|
||||
if __name__ == '__main__':
|
||||
|
||||
user_tags = {}
|
||||
f = open(os.path.join(user_dir, 'tags.csv'), 'r')
|
||||
f = open(os.path.join(config.get('_USERDIR'), 'tags.csv'), 'r')
|
||||
while True:
|
||||
r = f.readline().rstrip()
|
||||
if len(r) == 0:
|
||||
@ -158,7 +152,8 @@ if __name__ == '__main__':
|
||||
|
||||
i = 0
|
||||
j = 0
|
||||
for x in os.walk(user_old_dir):
|
||||
|
||||
for x in os.walk(dirs['old']):
|
||||
for y in x[2]:
|
||||
if y[len(y)-5:] != '.json':
|
||||
continue
|
||||
@ -182,7 +177,7 @@ if __name__ == '__main__':
|
||||
|
||||
new_address_clean = strip_0x(new_address)
|
||||
filepath = os.path.join(
|
||||
user_new_dir,
|
||||
dirs['new'],
|
||||
new_address_clean[:2].upper(),
|
||||
new_address_clean[2:4].upper(),
|
||||
new_address_clean.upper() + '.json',
|
||||
@ -194,17 +189,17 @@ if __name__ == '__main__':
|
||||
f.write(json.dumps(o))
|
||||
f.close()
|
||||
|
||||
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), ':cic.person')
|
||||
meta_filepath = os.path.join(meta_dir, '{}.json'.format(new_address_clean.upper()))
|
||||
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), MetadataPointer.PERSON)
|
||||
meta_filepath = os.path.join(dirs['meta'], '{}.json'.format(new_address_clean.upper()))
|
||||
os.symlink(os.path.realpath(filepath), meta_filepath)
|
||||
|
||||
phone_object = phonenumbers.parse(u.tel)
|
||||
phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)
|
||||
meta_phone_key = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone')
|
||||
meta_phone_filepath = os.path.join(phone_dir, 'meta', meta_phone_key)
|
||||
meta_phone_key = generate_metadata_pointer(phone.encode('utf-8'), MetadataPointer.PHONE)
|
||||
meta_phone_filepath = os.path.join(dirs['phone'], 'meta', meta_phone_key)
|
||||
|
||||
filepath = os.path.join(
|
||||
phone_dir,
|
||||
dirs['phone'],
|
||||
'new',
|
||||
meta_phone_key[:2].upper(),
|
||||
meta_phone_key[2:4].upper(),
|
||||
@ -220,11 +215,11 @@ if __name__ == '__main__':
|
||||
|
||||
|
||||
# custom data
|
||||
custom_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), ':cic.custom')
|
||||
custom_filepath = os.path.join(custom_dir, 'meta', custom_key)
|
||||
custom_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), MetadataPointer.CUSTOM)
|
||||
custom_filepath = os.path.join(dirs['custom'], 'meta', custom_key)
|
||||
|
||||
filepath = os.path.join(
|
||||
custom_dir,
|
||||
dirs['custom'],
|
||||
'new',
|
||||
custom_key[:2].upper(),
|
||||
custom_key[2:4].upper(),
|
||||
|
@ -3,6 +3,10 @@ from chainlib.jsonrpc import JSONRPCException
|
||||
from eth_erc20 import ERC20
|
||||
from eth_accounts_index import AccountsIndex
|
||||
from eth_token_index import TokenUniqueSymbolIndex
|
||||
import logging
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ERC20Token:
|
||||
|
||||
@ -46,7 +50,8 @@ class IndexCache:
|
||||
try:
|
||||
r = conn.do(o)
|
||||
entries.append(self.parse(r, conn))
|
||||
except JSONRPCException:
|
||||
except JSONRPCException as e:
|
||||
logg.debug('foo {}'.format(e))
|
||||
return entries
|
||||
i += 1
|
||||
|
||||
|
@ -11,6 +11,18 @@ queue = 'cic-eth'
|
||||
name = 'account'
|
||||
|
||||
|
||||
def create_user(chain_spec, redis_host_callback, redis_port_callback, redis_db, redis_channel):
|
||||
api = Api(
|
||||
str(chain_spec),
|
||||
queue=queue,
|
||||
callback_param='{}:{}:{}:{}'.format(redis_host_callback, redis_port_callback, redis_db, redis_channel),
|
||||
callback_task='cic_eth.callbacks.redis.redis',
|
||||
callback_queue=queue,
|
||||
)
|
||||
|
||||
return api.create_account(register=True)
|
||||
|
||||
|
||||
def do(token_pair, sender, recipient, sender_balance, aux, block_number):
|
||||
"""Triggers creation and registration of new account through the custodial cic-eth component.
|
||||
|
||||
@ -24,14 +36,6 @@ def do(token_pair, sender, recipient, sender_balance, aux, block_number):
|
||||
See local.noop.do for details on parameters and return values.
|
||||
"""
|
||||
logg.debug('running {} {} {}'.format(__name__, token_pair, sender, recipient))
|
||||
api = Api(
|
||||
str(aux['chain_spec']),
|
||||
queue=queue,
|
||||
callback_param='{}:{}:{}:{}'.format(aux['redis_host_callback'], aux['redis_port_callback'], aux['redis_db'], aux['redis_channel']),
|
||||
callback_task='cic_eth.callbacks.redis.redis',
|
||||
callback_queue=queue,
|
||||
)
|
||||
|
||||
t = api.create_account(register=True)
|
||||
t = create_user(aux['chain_spec'], aux['redis_host_callback'], aux['redis_port_callback'], aux['redis_db'], aux['redis_channel'])
|
||||
|
||||
return (None, t, sender_balance, )
|
||||
|
@ -21,6 +21,9 @@ import chainlib.eth.cli
|
||||
import cic_eth.cli
|
||||
from cic_eth.cli.chain import chain_interface
|
||||
from chainlib.eth.constant import ZERO_ADDRESS
|
||||
from eth_accounts_index import AccountsIndex
|
||||
from erc20_faucet import Faucet
|
||||
from cic_eth.api import Api
|
||||
|
||||
# local imports
|
||||
#import common
|
||||
@ -109,6 +112,12 @@ def main():
|
||||
logg.info('using account registry {}'.format(account_registry))
|
||||
account_cache = AccountRegistryCache(chain_spec, account_registry)
|
||||
|
||||
faucet = registry.lookup('Faucet')
|
||||
if faucet == ZERO_ADDRESS:
|
||||
logg.warning('Faucet entry missing from contract registry {}. New account registrations will need an external mechanism for initial token balances.'.format(config.get('CIC_REGISTRY_ADDRESS')))
|
||||
else:
|
||||
logg.info('using faucet {}'.format(faucet))
|
||||
|
||||
# Set up provisioner for common task input data
|
||||
TrafficProvisioner.oracles['token'] = token_cache
|
||||
TrafficProvisioner.oracles['account'] = account_cache
|
||||
@ -124,6 +133,27 @@ def main():
|
||||
|
||||
syncer = HeadSyncer(syncer_backend, chain_interface, block_callback=handler.refresh)
|
||||
syncer.add_filter(handler)
|
||||
|
||||
# bootstrap two accounts if starting from scratch
|
||||
c = AccountsIndex(chain_spec)
|
||||
o = c.entry_count(account_registry)
|
||||
r = conn.do(o)
|
||||
|
||||
logg.debug('entry count {}'.format(c.parse_entry_count(r)))
|
||||
|
||||
if c.parse_entry_count(r) == 0:
|
||||
if faucet == ZERO_ADDRESS:
|
||||
raise ValueError('No accounts exist in network and no faucet exists. It will be impossible for any created accounts to trade.')
|
||||
c = Faucet(chain_spec)
|
||||
o = c.token_amount(faucet)
|
||||
r = conn.do(o)
|
||||
if c.parse_token_amount(r) == 0:
|
||||
raise ValueError('No accounts exist in network and faucet amount is set to 0. It will be impossible for any created accounts to trade.')
|
||||
|
||||
api = Api(str(chain_spec), queue=config.get('CELERY_QUEUE'))
|
||||
api.create_account(register=True)
|
||||
api.create_account(register=True)
|
||||
|
||||
syncer.loop(1, conn)
|
||||
|
||||
|
||||
|
@ -57,8 +57,8 @@ elif args.v:
|
||||
config = Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
args_override = {
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
'ETH_PROVIDER': getattr(args, 'p'),
|
||||
'CHAIN_SPEC': getattr(args, 'i'),
|
||||
'RPC_PROVIDER': getattr(args, 'p'),
|
||||
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
||||
'REDIS_HOST': getattr(args, 'redis_host'),
|
||||
'REDIS_PORT': getattr(args, 'redis_port'),
|
||||
@ -90,7 +90,7 @@ signer = EIP155Signer(keystore)
|
||||
|
||||
block_offset = -1 if args.head else args.offset
|
||||
|
||||
chain_str = config.get('CIC_CHAIN_SPEC')
|
||||
chain_str = config.get('CHAIN_SPEC')
|
||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||
ImportTask.chain_spec = chain_spec
|
||||
old_chain_spec_str = args.old_chain_spec
|
||||
@ -99,16 +99,12 @@ old_chain_spec = ChainSpec.from_chain_str(old_chain_spec_str)
|
||||
MetadataTask.meta_host = config.get('META_HOST')
|
||||
MetadataTask.meta_port = config.get('META_PORT')
|
||||
|
||||
txs_dir = os.path.join(args.import_dir, 'txs')
|
||||
os.makedirs(txs_dir, exist_ok=True)
|
||||
sys.stdout.write(f'created txs dir: {txs_dir}')
|
||||
|
||||
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||
get_celery_worker_status(celery_app)
|
||||
|
||||
|
||||
def main():
|
||||
conn = EthHTTPConnection(config.get('ETH_PROVIDER'))
|
||||
conn = EthHTTPConnection(config.get('RPC_PROVIDER'))
|
||||
ImportTask.balance_processor = BalanceProcessor(conn,
|
||||
chain_spec,
|
||||
config.get('CIC_REGISTRY_ADDRESS'),
|
||||
|
@ -14,7 +14,9 @@ from celery import Task
|
||||
from chainlib.chain import ChainSpec
|
||||
from chainlib.eth.address import to_checksum_address
|
||||
from chainlib.eth.tx import raw, unpack
|
||||
from cic_types.models.person import Person, generate_metadata_pointer
|
||||
from cic_types.models.person import Person, identity_tag
|
||||
from cic_types.processor import generate_metadata_pointer
|
||||
from cic_types.condiments import MetadataPointer
|
||||
from hexathon import add_0x, strip_0x
|
||||
|
||||
# local imports
|
||||
@ -55,7 +57,7 @@ class MetadataTask(ImportTask):
|
||||
|
||||
|
||||
def old_address_from_phone(base_path: str, phone_number: str):
|
||||
pid_x = generate_metadata_pointer(phone_number.encode('utf-8'), ':cic.phone')
|
||||
pid_x = generate_metadata_pointer(phone_number.encode('utf-8'), MetadataPointer.PHONE)
|
||||
phone_idx_path = os.path.join(f'{base_path}/phone/{pid_x[:2]}/{pid_x[2:4]}/{pid_x}')
|
||||
with open(phone_idx_path, 'r') as f:
|
||||
old_address = f.read()
|
||||
@ -73,9 +75,13 @@ def generate_person_metadata(self, blockchain_address: str, phone_number: str):
|
||||
person = Person.deserialize(person_metadata)
|
||||
if not person.identities.get('evm'):
|
||||
person.identities['evm'] = {}
|
||||
sub_chain_str = f'{self.chain_spec.common_name()}:{self.chain_spec.network_id()}'
|
||||
person.identities['evm'][sub_chain_str] = [add_0x(blockchain_address)]
|
||||
blockchain_address = strip_0x(blockchain_address)
|
||||
chain_spec = self.chain_spec.asdict()
|
||||
arch = chain_spec.get('arch')
|
||||
fork = chain_spec.get('fork')
|
||||
tag = identity_tag(chain_spec)
|
||||
person.identities[arch][fork] = {
|
||||
tag: [blockchain_address]
|
||||
}
|
||||
file_path = os.path.join(
|
||||
self.import_dir,
|
||||
'new',
|
||||
@ -102,7 +108,7 @@ def generate_preferences_data(self, data: tuple):
|
||||
blockchain_address: str = data[0]
|
||||
preferences = data[1]
|
||||
preferences_dir = os.path.join(self.import_dir, 'preferences')
|
||||
preferences_key = generate_metadata_pointer(bytes.fromhex(strip_0x(blockchain_address)), ':cic.preferences')
|
||||
preferences_key = generate_metadata_pointer(bytes.fromhex(strip_0x(blockchain_address)), MetadataPointer.PREFERENCES)
|
||||
preferences_filepath = os.path.join(preferences_dir, 'meta', preferences_key)
|
||||
filepath = os.path.join(
|
||||
preferences_dir,
|
||||
@ -137,7 +143,7 @@ def generate_ussd_data(self, blockchain_address: str, phone_number: str):
|
||||
preferred_language = random.sample(["en", "sw"], 1)[0]
|
||||
preferences = {'preferred_language': preferred_language}
|
||||
with open(ussd_data_file, file_op) as ussd_data_file:
|
||||
ussd_data_file.write(f'{phone_number}, { 1}, {preferred_language}, {False}\n')
|
||||
ussd_data_file.write(f'{phone_number}, 1, {preferred_language}, False\n')
|
||||
logg.debug(f'written ussd data for address: {blockchain_address}')
|
||||
return blockchain_address, preferences
|
||||
|
||||
@ -163,7 +169,7 @@ def opening_balance_tx(self, blockchain_address: str, phone_number: str, serial:
|
||||
|
||||
@celery_app.task(bind=True, base=MetadataTask)
|
||||
def resolve_phone(self, phone_number: str):
|
||||
identifier = generate_metadata_pointer(phone_number.encode('utf-8'), ':cic.phone')
|
||||
identifier = generate_metadata_pointer(phone_number.encode('utf-8'), MetadataPointer.PHONE)
|
||||
url = parse.urljoin(self.meta_url(), identifier)
|
||||
logg.debug(f'attempt getting phone pointer at: {url} for phone: {phone_number}')
|
||||
r = request.urlopen(url)
|
||||
|
@ -17,6 +17,7 @@ from cic_types.models.person import Person
|
||||
from confini import Config
|
||||
|
||||
# local imports
|
||||
from common.dirs import initialize_dirs
|
||||
from import_util import get_celery_worker_status
|
||||
|
||||
default_config_dir = './config'
|
||||
@ -37,6 +38,7 @@ arg_parser.add_argument('--env-prefix',
|
||||
dest='env_prefix',
|
||||
type=str,
|
||||
help='environment prefix for variables to overwrite configuration.')
|
||||
arg_parser.add_argument('-f', action='store_true', help='force clear previous state')
|
||||
arg_parser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||
arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='celery queue to submit data seeding tasks to.')
|
||||
arg_parser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
|
||||
@ -70,21 +72,7 @@ config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug(f'config loaded from {args.c}:\n{config}')
|
||||
|
||||
old_account_dir = os.path.join(args.import_dir, 'old')
|
||||
os.stat(old_account_dir)
|
||||
logg.debug(f'created old system data dir: {old_account_dir}')
|
||||
|
||||
new_account_dir = os.path.join(args.import_dir, 'new')
|
||||
os.makedirs(new_account_dir, exist_ok=True)
|
||||
logg.debug(f'created new system data dir: {new_account_dir}')
|
||||
|
||||
person_metadata_dir = os.path.join(args.import_dir, 'meta')
|
||||
os.makedirs(person_metadata_dir, exist_ok=True)
|
||||
logg.debug(f'created person metadata dir: {person_metadata_dir}')
|
||||
|
||||
preferences_dir = os.path.join(args.import_dir, 'preferences')
|
||||
os.makedirs(os.path.join(preferences_dir, 'meta'), exist_ok=True)
|
||||
logg.debug(f'created preferences metadata dir: {preferences_dir}')
|
||||
dirs = initialize_dirs(args.import_dir, force_reset=args.f)
|
||||
|
||||
valid_service_codes = config.get('USSD_SERVICE_CODE').split(",")
|
||||
|
||||
@ -157,7 +145,7 @@ def register_account(person: Person):
|
||||
if __name__ == '__main__':
|
||||
i = 0
|
||||
j = 0
|
||||
for x in os.walk(old_account_dir):
|
||||
for x in os.walk(dirs['old']):
|
||||
for y in x[2]:
|
||||
if y[len(y) - 5:] != '.json':
|
||||
continue
|
||||
|
42
apps/data-seeding/common/dirs.py
Normal file
42
apps/data-seeding/common/dirs.py
Normal file
@ -0,0 +1,42 @@
|
||||
# standard imports
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
|
||||
def initialize_dirs(user_dir, force_reset=False):
|
||||
|
||||
dirs = {}
|
||||
|
||||
dirs['old'] = os.path.join(user_dir, 'old')
|
||||
dirs['new'] = os.path.join(user_dir, 'new')
|
||||
dirs['meta'] = os.path.join(user_dir, 'meta')
|
||||
dirs['custom'] = os.path.join(user_dir, 'custom')
|
||||
dirs['preferences'] = os.path.join(user_dir, 'preferences')
|
||||
dirs['txs'] = os.path.join(user_dir, 'txs')
|
||||
dirs['keyfile'] = os.path.join(user_dir, 'keystore')
|
||||
dirs['custom_new'] = os.path.join(dirs['custom'], 'new')
|
||||
dirs['custom_meta'] = os.path.join(dirs['custom'], 'meta')
|
||||
dirs['preferences_meta'] = os.path.join(dirs['preferences'], 'meta')
|
||||
dirs['preferences_new'] = os.path.join(dirs['preferences'], 'new')
|
||||
|
||||
try:
|
||||
os.stat(dirs['old'])
|
||||
except FileNotFoundError:
|
||||
sys.stderr.write('no users to import. please run create_import_users.py first\n')
|
||||
sys.exit(1)
|
||||
|
||||
if force_reset:
|
||||
for d in dirs.keys():
|
||||
if d == 'old':
|
||||
continue
|
||||
try:
|
||||
shutil.rmtree(dirs[d])
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
for d in dirs.keys():
|
||||
if d == 'old':
|
||||
continue
|
||||
os.makedirs(dirs[d])
|
||||
|
||||
return dirs
|
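(For orientation, a small usage sketch of the `initialize_dirs` helper added above, assuming it is run from `apps/data-seeding` so that `common` is importable, and that the hypothetical user dir `out` already contains an `old` subdirectory, as the helper requires.)

```python
# Sketch only: prepare the data-seeding directory layout with initialize_dirs.
# 'out' is a hypothetical user_dir; its 'old' subdirectory must already exist.
from common.dirs import initialize_dirs

dirs = initialize_dirs('out', force_reset=True)

# The helper returns a dict of paths; 'old' is preserved, the others recreated.
print(dirs['new'])      # out/new
print(dirs['meta'])     # out/meta
print(dirs['keyfile'])  # out/keystore
```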
2
apps/data-seeding/config/chain.ini
Normal file
2
apps/data-seeding/config/chain.ini
Normal file
@ -0,0 +1,2 @@
|
||||
[chain]
|
||||
spec =
|
@ -1,10 +1,2 @@
|
||||
[cic]
|
||||
registry_address =
|
||||
token_index_address =
|
||||
accounts_index_address =
|
||||
declarator_address =
|
||||
approval_escrow_address =
|
||||
chain_spec =
|
||||
tx_retry_delay =
|
||||
trust_address =
|
||||
user_ussd_svc_service_port =
|
||||
|
@ -1,2 +0,0 @@
|
||||
[eth]
|
||||
provider = http://localhost:63545
|
2
apps/data-seeding/config/rpc.ini
Normal file
2
apps/data-seeding/config/rpc.ini
Normal file
@ -0,0 +1,2 @@
|
||||
[rpc]
|
||||
provider =
|
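(To show how the new `chain.ini` and `rpc.ini` surface in the scripts, a minimal sketch based on confini's section_option naming, which the updated code relies on; the config path is a hypothetical example.)

```python
# Sketch only: confini flattens [chain] spec -> CHAIN_SPEC and
# [rpc] provider -> RPC_PROVIDER, the keys the updated scripts read.
import confini

config = confini.Config('apps/data-seeding/config')  # hypothetical path
config.process()

print(config.get('CHAIN_SPEC'))    # value of 'spec' under [chain]
print(config.get('RPC_PROVIDER'))  # value of 'provider' under [rpc]
```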
@ -20,7 +20,7 @@ from cic_types.models.person import (
|
||||
generate_vcard_from_contact_data,
|
||||
get_contact_data_from_vcard,
|
||||
)
|
||||
from chainlib.eth.address import to_checksum_address
|
||||
from chainlib.eth.address import to_checksum_address, strip_0x
|
||||
import phonenumbers
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
@ -30,7 +30,6 @@ fake = Faker(['sl', 'en_US', 'no', 'de', 'ro'])
|
||||
|
||||
default_config_dir = './config'
|
||||
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default=default_config_dir, help='Config dir')
|
||||
argparser.add_argument('--tag', type=str, action='append',
|
||||
@ -54,7 +53,6 @@ config = confini.Config(args.c, os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config.process()
|
||||
logg.debug('loaded config\n{}'.format(config))
|
||||
|
||||
|
||||
dt_now = datetime.datetime.utcnow()
|
||||
dt_then = dt_now - datetime.timedelta(weeks=150)
|
||||
ts_now = int(dt_now.timestamp())
|
||||
@ -105,7 +103,6 @@ def genId(addr, typ):
|
||||
|
||||
|
||||
def genDate():
|
||||
|
||||
ts = random.randint(ts_then, ts_now)
|
||||
return int(datetime.datetime.fromtimestamp(ts).timestamp())
|
||||
|
||||
@ -148,9 +145,7 @@ def genDob():
|
||||
|
||||
|
||||
def gen():
|
||||
old_blockchain_address = '0x' + os.urandom(20).hex()
|
||||
old_blockchain_checksum_address = to_checksum_address(
|
||||
old_blockchain_address)
|
||||
old_blockchain_address = os.urandom(20).hex()
|
||||
gender = random.choice(['female', 'male', 'other'])
|
||||
phone = genPhone()
|
||||
v = genPersonal(phone)
|
||||
@ -164,9 +159,9 @@ def gen():
|
||||
p.gender = gender
|
||||
p.identities = {
|
||||
'evm': {
|
||||
'oldchain:1': [
|
||||
old_blockchain_checksum_address,
|
||||
],
|
||||
'foo': {
|
||||
'1:oldchain': [old_blockchain_address],
|
||||
},
|
||||
},
|
||||
}
|
||||
p.products = [fake.random_element(elements=OrderedDict(
|
||||
@ -207,7 +202,7 @@ def gen():
|
||||
# fake.local_latitude()
|
||||
p.location['longitude'] = (random.random() * 360) - 180
|
||||
|
||||
return (old_blockchain_checksum_address, phone, p)
|
||||
return old_blockchain_address, phone, p
|
||||
|
||||
|
||||
def prepareLocalFilePath(datadir, address):
|
||||
@ -242,7 +237,7 @@ if __name__ == '__main__':
|
||||
except Exception as e:
|
||||
logg.warning('generate failed, trying anew: {}'.format(e))
|
||||
continue
|
||||
uid = eth[2:].upper()
|
||||
uid = strip_0x(eth).upper()
|
||||
|
||||
print(o)
|
||||
|
||||
|
@ -7,20 +7,23 @@ RUN mkdir -vp /usr/local/etc/cic
|
||||
|
||||
COPY package.json \
|
||||
package-lock.json \
|
||||
.
|
||||
./
|
||||
|
||||
|
||||
RUN npm ci --production
|
||||
#RUN --mount=type=cache,mode=0755,target=/root/node_modules npm install
|
||||
|
||||
COPY common/ cic_ussd/common/
|
||||
COPY requirements.txt .
|
||||
COPY config/ /usr/local/etc/data-seeding
|
||||
COPY config/ config
|
||||
|
||||
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip pip install \
|
||||
--extra-index-url $GITLAB_PYTHON_REGISTRY \
|
||||
--extra-index-url $EXTRA_INDEX_URL -r requirements.txt
|
||||
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
|
||||
ARG EXTRA_PIP_ARGS=""
|
||||
ARG PIP_INDEX_URL=https://pypi.org/simple
|
||||
|
||||
RUN pip install --index-url $PIP_INDEX_URL \
|
||||
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
|
||||
-r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
|
@ -47,12 +47,15 @@ from erc20_faucet import Faucet
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
config_dir = './config'
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
root_dir = os.path.dirname(script_dir)
|
||||
base_config_dir = os.path.join(root_dir, 'config')
|
||||
|
||||
|
||||
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
||||
argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||
argparser.add_argument('-c', type=str, help='config override directory')
|
||||
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
|
||||
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||
argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
|
||||
@ -71,21 +74,24 @@ if args.v == True:
|
||||
elif args.vv == True:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
|
||||
config_dir = os.path.join(args.c)
|
||||
os.makedirs(config_dir, 0o777, True)
|
||||
config = confini.Config(config_dir, args.env_prefix)
|
||||
config = None
|
||||
logg.debug('config dir {}'.format(base_config_dir))
|
||||
if args.c != None:
|
||||
config = confini.Config(base_config_dir, env_prefix=os.environ.get('CONFINI_ENV_PREFIX'), override_dirs=args.c)
|
||||
else:
|
||||
config = confini.Config(base_config_dir, env_prefix=os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config.process()
|
||||
|
||||
# override args
|
||||
args_override = {
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
'ETH_PROVIDER': getattr(args, 'p'),
|
||||
'CHAIN_SPEC': getattr(args, 'i'),
|
||||
'RPC_PROVIDER': getattr(args, 'p'),
|
||||
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
||||
'KEYSTORE_FILE_PATH': getattr(args, 'y')
|
||||
}
|
||||
config.dict_override(args_override, 'cli flag')
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||
|
||||
#app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
||||
|
||||
@ -98,7 +104,7 @@ if args.y != None:
|
||||
signer = EIP155Signer(keystore)
|
||||
|
||||
queue = args.q
|
||||
chain_str = config.get('CIC_CHAIN_SPEC')
|
||||
chain_str = config.get('CHAIN_SPEC')
|
||||
block_offset = 0
|
||||
if args.head:
|
||||
block_offset = -1
|
||||
@ -203,7 +209,7 @@ def progress_callback(block_number, tx_index):
|
||||
def main():
|
||||
global chain_str, block_offset, user_dir
|
||||
|
||||
conn = EthHTTPConnection(config.get('ETH_PROVIDER'))
|
||||
conn = EthHTTPConnection(config.get('RPC_PROVIDER'))
|
||||
gas_oracle = OverrideGasOracle(conn=conn, limit=8000000)
|
||||
nonce_oracle = RPCNonceOracle(signer_address, conn)
|
||||
|
||||
|
@ -8,6 +8,7 @@ import uuid
|
||||
import datetime
|
||||
import time
|
||||
import phonenumbers
|
||||
import shutil
|
||||
from glob import glob
|
||||
|
||||
# external imports
|
||||
@ -23,22 +24,30 @@ from chainlib.eth.connection import EthHTTPConnection
|
||||
from chainlib.eth.gas import RPCGasOracle
|
||||
from chainlib.eth.nonce import RPCNonceOracle
|
||||
from cic_types.processor import generate_metadata_pointer
|
||||
from cic_types import MetadataPointer
|
||||
from eth_accounts_index.registry import AccountRegistry
|
||||
from eth_contract_registry import Registry
|
||||
from crypto_dev_signer.keystore.dict import DictKeystore
|
||||
from crypto_dev_signer.eth.signer.defaultsigner import ReferenceSigner as EIP155Signer
|
||||
from crypto_dev_signer.keystore.keyfile import to_dict as to_keyfile_dict
|
||||
|
||||
# local imports
|
||||
from common.dirs import initialize_dirs
|
||||
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
default_config_dir = '/usr/local/etc/cic'
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
root_dir = os.path.dirname(script_dir)
|
||||
base_config_dir = os.path.join(root_dir, 'config')
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
|
||||
argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
|
||||
argparser.add_argument('-c', type=str, default=default_config_dir, help='config file')
|
||||
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
|
||||
argparser.add_argument('-c', type=str, help='config override directory')
|
||||
argparser.add_argument('-f', action='store_true', help='force clear previous state')
|
||||
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:foo:1:oldchain', help='chain spec')
|
||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string')
|
||||
argparser.add_argument('-r', '--registry', dest='r', type=str, help='Contract registry address')
|
||||
argparser.add_argument('--batch-size', dest='batch_size', default=50, type=int, help='burst size of sending transactions to node')
|
||||
@ -53,40 +62,23 @@ if args.v:
|
||||
elif args.vv:
|
||||
logg.setLevel(logging.DEBUG)
|
||||
|
||||
config_dir = args.c
|
||||
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config = None
|
||||
if args.c != None:
|
||||
config = confini.Config(base_config_dir, os.environ.get('CONFINI_ENV_PREFIX'), override_config_dir=args.c)
|
||||
else:
|
||||
config = confini.Config(base_config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config.process()
|
||||
args_override = {
|
||||
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
'CHAIN_SPEC': getattr(args, 'i'),
|
||||
'KEYSTORE_FILE_PATH': getattr(args, 'y')
|
||||
}
|
||||
config.dict_override(args_override, 'cli')
|
||||
config.add(args.user_dir, '_USERDIR', True)
|
||||
|
||||
user_new_dir = os.path.join(args.user_dir, 'new')
|
||||
os.makedirs(user_new_dir)
|
||||
#user_dir = args.user_dir
|
||||
|
||||
meta_dir = os.path.join(args.user_dir, 'meta')
|
||||
os.makedirs(meta_dir)
|
||||
|
||||
custom_dir = os.path.join(args.user_dir, 'custom')
|
||||
os.makedirs(custom_dir)
|
||||
os.makedirs(os.path.join(custom_dir, 'new'))
|
||||
os.makedirs(os.path.join(custom_dir, 'meta'))
|
||||
|
||||
phone_dir = os.path.join(args.user_dir, 'phone')
|
||||
os.makedirs(os.path.join(phone_dir, 'meta'))
|
||||
|
||||
user_old_dir = os.path.join(args.user_dir, 'old')
|
||||
os.stat(user_old_dir)
|
||||
|
||||
txs_dir = os.path.join(args.user_dir, 'txs')
|
||||
os.makedirs(txs_dir)
|
||||
|
||||
user_dir = args.user_dir
|
||||
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
|
||||
chain_str = str(chain_spec)
|
||||
|
||||
old_chain_spec = ChainSpec.from_chain_str(args.old_chain_spec)
|
||||
@ -113,8 +105,8 @@ r = rpc.do(o)
|
||||
account_registry_address = registry.parse_address_of(r)
|
||||
logg.info('using account registry {}'.format(account_registry_address))
|
||||
|
||||
keyfile_dir = os.path.join(config.get('_USERDIR'), 'keystore')
|
||||
os.makedirs(keyfile_dir)
|
||||
dirs = initialize_dirs(config.get('_USERDIR'), force_reset=args.f)
|
||||
dirs['phone'] = os.path.join(config.get('_USERDIR'))
|
||||
|
||||
def register_eth(i, u):
|
||||
|
||||
@ -129,7 +121,7 @@ def register_eth(i, u):
|
||||
|
||||
pk = keystore.get(address)
|
||||
keyfile_content = to_keyfile_dict(pk, 'foo')
|
||||
keyfile_path = os.path.join(keyfile_dir, '{}.json'.format(address))
|
||||
keyfile_path = os.path.join(dirs['keyfile'], '{}.json'.format(address))
|
||||
f = open(keyfile_path, 'w')
|
||||
json.dump(keyfile_content, f)
|
||||
f.close()
|
||||
@ -142,7 +134,7 @@ def register_eth(i, u):
|
||||
if __name__ == '__main__':
|
||||
|
||||
user_tags = {}
|
||||
f = open(os.path.join(user_dir, 'tags.csv'), 'r')
|
||||
f = open(os.path.join(config.get('_USERDIR'), 'tags.csv'), 'r')
|
||||
while True:
|
||||
r = f.readline().rstrip()
|
||||
if len(r) == 0:
|
||||
@ -154,7 +146,7 @@ if __name__ == '__main__':
|
||||
|
||||
i = 0
|
||||
j = 0
|
||||
for x in os.walk(user_old_dir):
|
||||
for x in os.walk(dirs['old']):
|
||||
for y in x[2]:
|
||||
if y[len(y)-5:] != '.json':
|
||||
continue
|
||||
@ -173,12 +165,12 @@ if __name__ == '__main__':
|
||||
new_address = register_eth(i, u)
|
||||
if u.identities.get('evm') == None:
|
||||
u.identities['evm'] = {}
|
||||
sub_chain_str = '{}:{}'.format(chain_spec.common_name(), chain_spec.network_id())
|
||||
u.identities['evm'][sub_chain_str] = [new_address]
|
||||
sub_chain_str = '{}:{}'.format(chain_spec.network_id(), chain_spec.common_name())
|
||||
u.identities['evm']['foo'][sub_chain_str] = [new_address]
|
||||
|
||||
new_address_clean = strip_0x(new_address)
|
||||
filepath = os.path.join(
|
||||
user_new_dir,
|
||||
dirs['new'],
|
||||
new_address_clean[:2].upper(),
|
||||
new_address_clean[2:4].upper(),
|
||||
new_address_clean.upper() + '.json',
|
||||
@ -190,18 +182,17 @@ if __name__ == '__main__':
|
||||
f.write(json.dumps(o))
|
||||
f.close()
|
||||
|
||||
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), ':cic.person')
|
||||
meta_filepath = os.path.join(meta_dir, '{}.json'.format(new_address_clean.upper()))
|
||||
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), MetadataPointer.PERSON)
|
||||
meta_filepath = os.path.join(dirs['meta'], '{}.json'.format(new_address_clean.upper()))
|
||||
os.symlink(os.path.realpath(filepath), meta_filepath)
|
||||
|
||||
phone_object = phonenumbers.parse(u.tel)
|
||||
phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)
|
||||
logg.debug('>>>>> Using phone {}'.format(phone))
|
||||
meta_phone_key = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone')
|
||||
meta_phone_filepath = os.path.join(phone_dir, 'meta', meta_phone_key)
|
||||
meta_phone_key = generate_metadata_pointer(phone.encode('utf-8'), MetadataPointer.PHONE)
|
||||
meta_phone_filepath = os.path.join(dirs['phone'], 'meta', meta_phone_key)
|
||||
|
||||
filepath = os.path.join(
|
||||
phone_dir,
|
||||
dirs['phone'],
|
||||
'new',
|
||||
meta_phone_key[:2].upper(),
|
||||
meta_phone_key[2:4].upper(),
|
||||
@ -217,11 +208,11 @@ if __name__ == '__main__':
|
||||
|
||||
|
||||
# custom data
|
||||
custom_key = generate_metadata_pointer(phone.encode('utf-8'), ':cic.custom')
|
||||
custom_filepath = os.path.join(custom_dir, 'meta', custom_key)
|
||||
custom_key = generate_metadata_pointer(phone.encode('utf-8'), MetadataPointer.CUSTOM)
|
||||
custom_filepath = os.path.join(dirs['custom'], 'meta', custom_key)
|
||||
|
||||
filepath = os.path.join(
|
||||
custom_dir,
|
||||
dirs['custom'],
|
||||
'new',
|
||||
custom_key[:2].upper(),
|
||||
custom_key[2:4].upper(),
|
||||
@ -229,9 +220,9 @@ if __name__ == '__main__':
|
||||
)
|
||||
os.makedirs(os.path.dirname(filepath), exist_ok=True)
|
||||
|
||||
sub_old_chain_str = '{}:{}'.format(old_chain_spec.common_name(), old_chain_spec.network_id())
|
||||
sub_old_chain_str = '{}:{}'.format(old_chain_spec.network_id(), old_chain_spec.common_name())
|
||||
f = open(filepath, 'w')
|
||||
k = u.identities['evm'][sub_old_chain_str][0]
|
||||
k = u.identities['evm']['foo'][sub_old_chain_str][0]
|
||||
tag_data = {'tags': user_tags[strip_0x(k)]}
|
||||
f.write(json.dumps(tag_data))
|
||||
f.close()
|
||||
|
50
apps/data-seeding/import_ussd.sh
Normal file → Executable file
50
apps/data-seeding/import_ussd.sh
Normal file → Executable file
@ -2,11 +2,11 @@
|
||||
|
||||
if [[ -d "$OUT_DIR" ]]
|
||||
then
|
||||
echo "found existing IMPORT DIR cleaning up..."
|
||||
echo -e "\033[;96mfound existing IMPORT DIR cleaning up...\033[;96m"
|
||||
rm -rf "$OUT_DIR"
|
||||
mkdir -p "$OUT_DIR"
|
||||
else
|
||||
echo "IMPORT DIR does not exist creating it."
|
||||
echo -e "\033[;96mIMPORT DIR does not exist creating it.\033[;96m"
|
||||
mkdir -p "$OUT_DIR"
|
||||
fi
|
||||
|
||||
@ -14,81 +14,81 @@ fi
|
||||
timeout 5 celery inspect ping -b "$CELERY_BROKER_URL"
|
||||
if [[ $? -eq 124 ]]
|
||||
then
|
||||
>&2 echo "Celery workers not available. Is the CELERY_BROKER_URL ($CELERY_BROKER_URL) correct?"
|
||||
>&2 echo -e "\033[;96mCelery workers not available. Is the CELERY_BROKER_URL ($CELERY_BROKER_URL) correct?\033[;96m"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Creating seed data..."
|
||||
echo -e "\033[;96mCreating seed data...\033[;96m"
|
||||
python create_import_users.py -vv -c "$CONFIG" --dir "$OUT_DIR" "$NUMBER_OF_USERS"
|
||||
wait $!
|
||||
|
||||
echo "Check for running celery workers ..."
|
||||
echo -e "\033[;96mCheck for running celery workers ...\033[;96m"
|
||||
if [ -f ./cic-ussd-import.pid ];
|
||||
then
|
||||
echo "Found a running worker. Killing ..."
|
||||
echo -e "\033[;96mFound a running worker. Killing ...\033[;96m"
|
||||
kill -9 $(<cic-ussd-import.pid)
|
||||
fi
|
||||
|
||||
echo "Purge tasks from celery worker"
|
||||
echo -e "\033[;96mPurge tasks from celery worker\033[;96m"
|
||||
celery -A cic_ussd.import_task purge -Q "$CELERY_QUEUE" --broker redis://"$REDIS_HOST":"$REDIS_PORT" -f
|
||||
|
||||
echo "Start celery work and import balance job"
|
||||
echo -e "\033[;96mStart celery work and import balance job\033[;96m"
|
||||
if [ "$INCLUDE_BALANCES" != "y" ]
|
||||
then
|
||||
echo "Running worker without opening balance transactions"
|
||||
echo -e "\033[;96mRunning worker without opening balance transactions\033[;96m"
|
||||
TARGET_TX_COUNT=$NUMBER_OF_USERS
|
||||
nohup python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$OUT_DIR" > nohup.out 2> nohup.err < /dev/null &
|
||||
else
|
||||
echo "Running worker with opening balance transactions"
|
||||
echo -e "\033[;96mRunning worker with opening balance transactions\033[;96m"
|
||||
TARGET_TX_COUNT=$((NUMBER_OF_USERS*2))
|
||||
nohup python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --include-balances --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$OUT_DIR" &
|
||||
fi
|
||||
|
||||
echo "Target count set to ${TARGET_TX_COUNT}"
|
||||
echo -e "\033[;96mTarget count set to ${TARGET_TX_COUNT}"
|
||||
until [ -f ./cic-import-ussd.pid ]
|
||||
do
|
||||
echo "Polling for celery worker pid file..."
|
||||
echo -e "\033[;96mPolling for celery worker pid file...\033[;96m"
|
||||
sleep 1
|
||||
done
|
||||
IMPORT_BALANCE_JOB=$(<cic-import-ussd.pid)
|
||||
|
||||
echo "Start import users job"
|
||||
echo -e "\033[;96mStart import users job\033[;96m"
|
||||
if [ "$USSD_SSL" == "y" ]
|
||||
then
|
||||
echo "Targeting secure ussd-user server"
|
||||
python cic_ussd/import_users.py -vv -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" "$OUT_DIR"
|
||||
echo -e "\033[;96mTargeting secure ussd-user server\033[;96m"
|
||||
python cic_ussd/import_users.py -vv -f -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" "$OUT_DIR"
|
||||
else
|
||||
python cic_ussd/import_users.py -vv -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" --ussd-no-ssl "$OUT_DIR"
|
||||
python cic_ussd/import_users.py -vv -f -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" --ussd-no-ssl "$OUT_DIR"
|
||||
fi
|
||||
|
||||
echo "Waiting for import balance job to complete ..."
|
||||
echo -e "\033[;96mWaiting for import balance job to complete ...\033[;96m"
|
||||
tail --pid="$IMPORT_BALANCE_JOB" -f /dev/null
|
||||
set -e
|
||||
|
||||
echo "Importing pins"
|
||||
echo -e "\033[;96mImporting pins\033[;96m"
|
||||
python cic_ussd/import_pins.py -c "$CONFIG" -vv "$OUT_DIR"
|
||||
set +e
|
||||
wait $!
|
||||
set -e
|
||||
|
||||
echo "Importing ussd data"
|
||||
echo -e "\033[;96mImporting ussd data\033[;96m"
|
||||
python cic_ussd/import_ussd_data.py -c "$CONFIG" -vv "$OUT_DIR"
|
||||
set +e
|
||||
wait $!
|
||||
|
||||
echo "Importing person metadata"
|
||||
echo -e "\033[;96mImporting person metadata\033[;96m"
|
||||
node cic_meta/import_meta.js "$OUT_DIR" "$NUMBER_OF_USERS"
|
||||
|
||||
echo "Import preferences metadata"
|
||||
echo -e "\033[;96mImport preferences metadata\033[;96m"
|
||||
node cic_meta/import_meta_preferences.js "$OUT_DIR" "$NUMBER_OF_USERS"
|
||||
|
||||
CIC_NOTIFY_DATABASE=postgres://$DATABASE_USER:$DATABASE_PASSWORD@$DATABASE_HOST:$DATABASE_PORT/$NOTIFY_DATABASE_NAME
|
||||
NOTIFICATION_COUNT=$(psql -qtA "$CIC_NOTIFY_DATABASE" -c 'SELECT COUNT(message) FROM notification WHERE message IS NOT NULL')
|
||||
while [[ "$NOTIFICATION_COUNT" < "$TARGET_TX_COUNT" ]]
|
||||
while (("$NOTIFICATION_COUNT" < "$TARGET_TX_COUNT" ))
|
||||
do
|
||||
NOTIFICATION_COUNT=$(psql -qtA "$CIC_NOTIFY_DATABASE" -c 'SELECT COUNT(message) FROM notification WHERE message IS NOT NULL')
|
||||
sleep 5
|
||||
echo "Notification count is: ${NOTIFICATION_COUNT} of ${TARGET_TX_COUNT}. Checking after 5 ..."
|
||||
echo -e "\033[;96mNotification count is: ${NOTIFICATION_COUNT} of ${TARGET_TX_COUNT}. Checking after 5 ...\033[;96m"
|
||||
done
|
||||
echo "Running verify script."
|
||||
python verify.py -c "$CONFIG" -v -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --exclude "$EXCLUSIONS" --meta-provider "$META_URL" --token-symbol "$TOKEN_SYMBOL" --ussd-provider "$USSD_PROVIDER" "$OUT_DIR"
|
||||
echo -e "\033[;96mRunning verify script.\033[;96m"
|
||||
python verify.py -v -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --exclude "$EXCLUSIONS" --meta-provider "$META_URL" --token-symbol "$TOKEN_SYMBOL" --ussd-provider "$USSD_PROVIDER" "$OUT_DIR"
|
||||
|
@ -1,14 +1,15 @@
|
||||
sarafu-faucet~=0.0.7a2
|
||||
cic-eth[tools]~=0.12.4a8
|
||||
cic-types~=0.1.0a15
|
||||
crypto-dev-signer~=0.4.15a7
|
||||
cic-eth[tools]~=0.12.4a13
|
||||
cic-types~=0.2.0a6
|
||||
funga>=0.5.1a1,<=0.5.15
|
||||
faker==4.17.1
|
||||
chainsyncer~=0.0.6a3
|
||||
chainlib-eth~=0.0.9a14
|
||||
eth-address-index~=0.2.3a4
|
||||
chainsyncer~=0.0.7a3
|
||||
chainlib-eth~=0.0.10a10
|
||||
eth-address-index~=0.2.4a1
|
||||
eth-contract-registry~=0.6.3a3
|
||||
eth-accounts-index~=0.1.2a3
|
||||
eth-erc20~=0.1.2a3
|
||||
erc20-faucet~=0.3.2a2
|
||||
psycopg2==2.8.6
|
||||
liveness~=0.0.1a7
|
||||
confini>=0.4.2rc3,<0.5.0
|
||||
|
@ -25,10 +25,9 @@ from chainlib.eth.gas import (
|
||||
from chainlib.eth.tx import TxFactory
|
||||
from chainlib.hash import keccak256_string_to_hex
|
||||
from chainlib.jsonrpc import JSONRPCRequest
|
||||
from cic_types.models.person import (
|
||||
Person,
|
||||
generate_metadata_pointer,
|
||||
)
|
||||
from cic_types.models.person import Person, identity_tag
|
||||
from cic_types.condiments import MetadataPointer
|
||||
from cic_types.processor import generate_metadata_pointer
|
||||
from erc20_faucet import Faucet
|
||||
from eth_erc20 import ERC20
|
||||
from hexathon.parse import strip_0x, add_0x
|
||||
@ -39,7 +38,8 @@ from eth_token_index import TokenUniqueSymbolIndex
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
config_dir = '/usr/local/etc/cic-syncer'
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
base_config_dir = os.path.join(script_dir, 'config')
|
||||
|
||||
custodial_tests = [
|
||||
'local_key',
|
||||
@ -72,8 +72,8 @@ all_tests = eth_tests + custodial_tests + metadata_tests + phone_tests
|
||||
|
||||
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
|
||||
argparser.add_argument('-c', type=str, help='config override dir')
|
||||
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:foo:1:oldchain', help='chain spec')
|
||||
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||
argparser.add_argument('--meta-provider', type=str, dest='meta_provider', default='http://localhost:63380', help='cic-meta url')
|
||||
argparser.add_argument('--ussd-provider', type=str, dest='ussd_provider', default='http://localhost:63315', help='cic-ussd url')
|
||||
@ -96,14 +96,18 @@ if args.v == True:
|
||||
elif args.vv == True:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
|
||||
config_dir = os.path.join(args.c)
|
||||
os.makedirs(config_dir, 0o777, True)
|
||||
config = confini.Config(config_dir, args.env_prefix)
|
||||
config = None
|
||||
logg.debug('config dir {}'.format(base_config_dir))
|
||||
if args.c != None:
|
||||
config = confini.Config(base_config_dir, env_prefix=os.environ.get('CONFINI_ENV_PREFIX'), override_dirs=args.c)
|
||||
else:
|
||||
config = confini.Config(base_config_dir, env_prefix=os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config.process()
|
||||
|
||||
# override args
|
||||
args_override = {
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
'ETH_PROVIDER': getattr(args, 'p'),
|
||||
'CHAIN_SPEC': getattr(args, 'i'),
|
||||
'RPC_PROVIDER': getattr(args, 'p'),
|
||||
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
||||
}
|
||||
config.dict_override(args_override, 'cli flag')
|
||||
@ -114,11 +118,9 @@ config.add(args.ussd_provider, '_USSD_PROVIDER', True)
|
||||
|
||||
token_symbol = args.token_symbol
|
||||
|
||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||
|
||||
celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
||||
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
|
||||
chain_str = str(chain_spec)
|
||||
old_chain_spec = ChainSpec.from_chain_str(args.old_chain_spec)
|
||||
old_chain_str = str(old_chain_spec)
|
||||
@ -304,7 +306,7 @@ class Verifier:
|
||||
|
||||
|
||||
def verify_gas(self, address, balance_token=None):
|
||||
o = balance(address)
|
||||
o = balance(add_0x(address))
|
||||
r = self.conn.do(o)
|
||||
logg.debug('wtf {}'.format(r))
|
||||
actual_balance = int(strip_0x(r), 16)
|
||||
@ -320,7 +322,7 @@ class Verifier:
|
||||
|
||||
|
||||
def verify_metadata(self, address, balance=None):
|
||||
k = generate_metadata_pointer(bytes.fromhex(strip_0x(address)), ':cic.person')
|
||||
k = generate_metadata_pointer(bytes.fromhex(strip_0x(address)), MetadataPointer.PERSON)
|
||||
url = os.path.join(config.get('_META_PROVIDER'), k)
|
||||
logg.debug('verify metadata url {}'.format(url))
|
||||
try:
|
||||
@ -364,7 +366,7 @@ class Verifier:
|
||||
|
||||
p = Person.deserialize(o)
|
||||
|
||||
k = generate_metadata_pointer(p.tel.encode('utf-8'), ':cic.phone')
|
||||
k = generate_metadata_pointer(p.tel.encode('utf-8'), MetadataPointer.PHONE)
|
||||
url = os.path.join(config.get('_META_PROVIDER'), k)
|
||||
logg.debug('verify metadata phone url {}'.format(url))
|
||||
try:
|
||||
@ -424,7 +426,7 @@ class Verifier:
|
||||
def main():
|
||||
global chain_str, block_offset, user_dir
|
||||
|
||||
conn = EthHTTPConnection(config.get('ETH_PROVIDER'))
|
||||
conn = EthHTTPConnection(config.get('RPC_PROVIDER'))
|
||||
gas_oracle = OverrideGasOracle(conn=conn, limit=8000000)
|
||||
|
||||
# Get Token registry address
|
||||
@ -502,10 +504,17 @@ def main():
|
||||
u = Person.deserialize(o)
|
||||
#logg.debug('data {}'.format(u.identities['evm']))
|
||||
|
||||
subchain_str = '{}:{}'.format(chain_spec.common_name(), chain_spec.network_id())
|
||||
new_address = u.identities['evm'][subchain_str][0]
|
||||
subchain_str = '{}:{}'.format(old_chain_spec.common_name(), old_chain_spec.network_id())
|
||||
old_address = u.identities['evm'][subchain_str][0]
|
||||
new_chain_spec = chain_spec.asdict()
|
||||
arch = new_chain_spec.get('arch')
|
||||
fork = new_chain_spec.get('fork')
|
||||
tag = identity_tag(new_chain_spec)
|
||||
new_address = u.identities[arch][fork][tag][0]
|
||||
|
||||
old_chainspec = old_chain_spec.asdict()
|
||||
arch = old_chainspec.get('arch')
|
||||
fork = old_chainspec.get('fork')
|
||||
tag = identity_tag(old_chainspec)
|
||||
old_address = u.identities[arch][fork][tag][0]
|
||||
balance = 0
|
||||
try:
|
||||
balance = balances[old_address]
|
||||
|
1122
docker-compose.yml
1122
docker-compose.yml
File diff suppressed because it is too large