Merge remote-tracking branch 'origin/master' into lash/migration-last-gasp

This commit is contained in:
nolash 2021-03-24 13:12:56 +01:00
commit 585ad07c6e
14 changed files with 176 additions and 107 deletions

View File

@@ -1,5 +1,5 @@
[database]
NAME=cic-eth
NAME=cic_cache
USER=postgres
PASSWORD=
HOST=localhost

View File

@@ -36,7 +36,7 @@ script_location = .
# output_encoding = utf-8
#sqlalchemy.url = driver://user:pass@localhost/dbname
sqlalchemy.url = postgresql+psycopg2://postgres@localhost:5432/cic-cache
sqlalchemy.url = postgresql+psycopg2://postgres@localhost:5432/cic_cache
[post_write_hooks]

View File

@@ -1,5 +0,0 @@
CREATE DATABASE "cic-cache";
CREATE DATABASE "cic-eth";
CREATE DATABASE "cic-notify";
CREATE DATABASE "cic-meta";
CREATE DATABASE "cic-signer";

View File

@@ -25,6 +25,7 @@ licence_files =
python_requires = >= 3.6
packages =
cic_cache
cic_cache.tasks
cic_cache.db
cic_cache.db.models
cic_cache.runnable
@@ -33,5 +34,6 @@ scripts =
[options.entry_points]
console_scripts =
cic-cache-tracker = cic_cache.runnable.tracker:main
cic-cache-server = cic_cache.runnable.server:main
cic-cache-trackerd = cic_cache.runnable.tracker:main
cic-cache-serverd = cic_cache.runnable.server:main
cic-cache-taskerd = cic_cache.runnable.tasker:main
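
The renamed entry points above follow the setuptools package.module:function convention, so each console script is a small generated wrapper that imports the named module and calls its main(). A simplified sketch of what the new cic-cache-taskerd script effectively resolves to is shown below; the wrapper itself is produced by setuptools/pip, and the assumption that taskerd runs the cache's task worker loop comes from the name only, not from anything else in this diff.

# simplified equivalent of the wrapper pip generates for
# cic-cache-taskerd = cic_cache.runnable.tasker:main
import sys
from cic_cache.runnable.tasker import main

if __name__ == '__main__':
    sys.exit(main())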

View File

@@ -10,7 +10,7 @@ from cic_notify.error import PleaseCommitFirstError
logg = logging.getLogger()
version = (0, 4, 0, 'alpha.2')
version = (0, 4, 0, 'alpha.3')
version_object = semver.VersionInfo(
major=version[0],
@@ -24,9 +24,6 @@ version_string = str(version_object)
def git_hash():
import subprocess
git_diff = subprocess.run(['git', 'diff'], capture_output=True)
if len(git_diff.stdout) > 0:
raise PleaseCommitFirstError()
git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
git_hash_brief = git_hash.stdout.decode('utf-8')[:8]
@@ -35,7 +32,7 @@ def git_hash():
try:
version_git = git_hash()
version_string += '.build.{}'.format(version_git)
version_string += '+build.{}'.format(version_git)
except FileNotFoundError:
time_string_pair = str(time.time()).split('.')
version_string += '+build.{}{:<09d}'.format(

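Changing '.build.{}' to '+build.{}' makes the appended short git hash valid semver build metadata (the part after '+', which is ignored for version precedence) instead of stretching the prerelease segment. A quick illustration with the semver package this module already imports; the hash shown is this commit's abbreviated hash, used purely as example data.

import semver

# build metadata sits after '+' and does not affect precedence
v = semver.VersionInfo(major=0, minor=4, patch=0, prerelease='alpha.3', build='build.585ad07c')
print(str(v))  # -> 0.4.0-alpha.3+build.585ad07c
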
View File

@@ -1,5 +1,5 @@
celery~=4.4.7
confini~=0.3.6a1
alembic~=1.4.2
celery~=4.4.7
confini~=0.3.6rc3
redis~=3.5.3
semver==2.13.0

View File

@@ -14,7 +14,7 @@ from cic_ussd.db.models.user import User
from cic_ussd.db.models.ussd_session import UssdSession
from cic_ussd.db.models.task_tracker import TaskTracker
from cic_ussd.menu.ussd_menu import UssdMenu
from cic_ussd.processor import custom_display_text, process_request
from cic_ussd.processor import custom_display_text, process_request, retrieve_most_recent_ussd_session
from cic_ussd.redis import InMemoryStore
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
from cic_ussd.validator import check_known_user, validate_response_type
@@ -60,7 +60,8 @@ def create_ussd_session(
phone: str,
service_code: str,
user_input: str,
current_menu: str) -> InMemoryUssdSession:
current_menu: str,
session_data: Optional[dict] = None) -> InMemoryUssdSession:
"""
Creates a new ussd session
:param external_session_id: Session id value provided by AT
@@ -73,6 +74,8 @@
:type user_input: str
:param current_menu: Menu name that is currently being displayed on the ussd session
:type current_menu: str
:param session_data: Any additional data that was persisted during the user's interaction with the system.
:type session_data: dict
:return: ussd session object
:rtype: Session
"""
@@ -81,7 +84,8 @@
msisdn=phone,
user_input=user_input,
state=current_menu,
service_code=service_code
service_code=service_code,
session_data=session_data
)
return session
@@ -126,7 +130,9 @@ def create_or_update_session(
phone=phone,
service_code=service_code,
user_input=user_input,
current_menu=current_menu)
current_menu=current_menu,
session_data=session_data
)
return ussd_session
@@ -338,14 +344,26 @@ def process_menu_interaction_requests(chain_str: str,
user_input=user_input
)
# create or update the ussd session as appropriate
ussd_session = create_or_update_session(
external_session_id=external_session_id,
phone=phone_number,
service_code=service_code,
user_input=user_input,
current_menu=current_menu.get('name')
)
last_ussd_session = retrieve_most_recent_ussd_session(phone_number=user.phone_number)
if last_ussd_session:
# create or update the ussd session as appropriate
ussd_session = create_or_update_session(
external_session_id=external_session_id,
phone=phone_number,
service_code=service_code,
user_input=user_input,
current_menu=current_menu.get('name'),
session_data=last_ussd_session.session_data
)
else:
ussd_session = create_or_update_session(
external_session_id=external_session_id,
phone=phone_number,
service_code=service_code,
user_input=user_input,
current_menu=current_menu.get('name')
)
# define appropriate response
response = custom_display_text(

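The new branch above differs only in whether session_data is forwarded, and create_ussd_session now defaults session_data to None. A condensed, equivalent sketch for illustration (not the committed code; it assumes create_or_update_session simply passes session_data through, which matches the hunk further up):

# forward the previous session's data when there is one, otherwise None,
# matching the Optional[dict] default on create_ussd_session
last_ussd_session = retrieve_most_recent_ussd_session(phone_number=user.phone_number)
ussd_session = create_or_update_session(
    external_session_id=external_session_id,
    phone=phone_number,
    service_code=service_code,
    user_input=user_input,
    current_menu=current_menu.get('name'),
    session_data=last_ussd_session.session_data if last_ussd_session else None
)
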
View File

@@ -6,7 +6,7 @@ from typing import Optional
# third party imports
import celery
from cic_types.models.person import Person
from sqlalchemy import desc
from tinydb.table import Document
# local imports
@@ -315,6 +315,16 @@ def process_start_menu(display_key: str, user: User):
)
def retrieve_most_recent_ussd_session(phone_number: str) -> UssdSession:
# get last ussd session based on user phone number
last_ussd_session = UssdSession.session\
.query(UssdSession)\
.filter_by(msisdn=phone_number)\
.order_by(desc(UssdSession.created))\
.first()
return last_ussd_session
def process_request(user_input: str, user: User, ussd_session: Optional[dict] = None) -> Document:
"""This function assesses a request based on the user from the request comes, the session_id and the user's
input. It determines whether the request translates to a return to an existing session by checking whether the
@@ -337,7 +347,23 @@ def process_request(user_input: str, user: User, ussd_session: Optional[dict] =
return UssdMenu.find_by_name(name=successive_state)
else:
if user.has_valid_pin():
return UssdMenu.find_by_name(name='start')
last_ussd_session = retrieve_most_recent_ussd_session(phone_number=user.phone_number)
key = create_cached_data_key(
identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
salt='cic.person'
)
user_metadata = get_cached_data(key=key)
if last_ussd_session:
# get last state
last_state = last_ussd_session.state
logg.debug(f'LAST USSD SESSION STATE: {last_state}')
# if last state is account_creation_prompt and metadata exists, show start menu
if last_state == 'account_creation_prompt' and user_metadata is not None:
return UssdMenu.find_by_name(name='start')
else:
return UssdMenu.find_by_name(name=last_state)
else:
if user.failed_pin_attempts >= 3 and user.get_account_status() == AccountStatus.LOCKED.name:
return UssdMenu.find_by_name(name='exit_pin_blocked')

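For a user with a valid PIN, the resume logic added above reduces to one decision: show the start menu when the last recorded state was account_creation_prompt and the cached person metadata already exists, otherwise drop the user back into that last state. A standalone restatement of just that decision, with a function name and arguments of my own choosing rather than anything from the module:

from typing import Optional

def resolve_resume_menu(last_state: str, user_metadata: Optional[dict]) -> str:
    # account creation finished and metadata is cached: start fresh
    if last_state == 'account_creation_prompt' and user_metadata is not None:
        return 'start'
    # otherwise resume where the previous session left off
    return last_state

# resolve_resume_menu('account_creation_prompt', {'given_name': 'A'}) -> 'start'
# resolve_resume_menu('enter_current_pin', None) -> 'enter_current_pin'
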
View File

@@ -1,4 +1,5 @@
FROM python:3.8.5-alpine
# FROM python:3.8.5-alpine
FROM python:3.8.6-slim-buster
# set working directory
WORKDIR /usr/src
@@ -6,10 +7,8 @@ WORKDIR /usr/src
# add args for installing from self-hosted packages
ARG pip_extra_index_url_flag='--extra-index-url https://pip.grassrootseconomics.net:8433'
# add alpine sys packages
RUN apk update && \
apk add git linux-headers postgresql-dev gnupg bash
RUN apk add --update musl-dev gcc libffi-dev
RUN apt-get update && \
apt-get install -y gcc gnupg libpq-dev wget make g++ bash procps git
# create secrets directory
RUN mkdir -vp pgp/keys

View File

@@ -1,21 +1,49 @@
cic_base[full_graph]~=0.1.1a24
#cic_base[full_graph]~=0.1.1a24
africastalking==1.2.3
alembic==1.4.2
amqp==2.6.1
attrs==20.2.0
bcrypt==3.2.0
betterpath==0.2.2
billiard==3.6.3.0
celery==4.4.7
chainlib~=0.0.1a20
cic-eth~=0.10.0a40
cic-notify~=0.4.0a2
cic-types==0.1.0a8
cffi==1.14.3
cic-eth~=0.10.0a46
cic-notify~=0.4.0a3
cic-types~=0.1.0a8
click==7.1.2
confini~=0.3.6rc3
cryptography==3.2.1
faker==4.17.1
iniconfig==1.1.1
kombu==4.6.11
Mako==1.1.3
MarkupSafe==1.1.1
mirakuru==2.3.0
more-itertools==8.5.0
packaging==20.4
phonenumbers==8.12.12
pluggy==0.13.1
port-for==0.4
psutil==5.7.3
psycopg2==2.8.6
py==1.9.0
pycparser==2.20
pyparsing==2.4.7
python-dateutil==2.8.1
python-editor==1.0.4
python-gnupg==0.4.6
python-i18n==0.3.9
pytz==2020.1
PyYAML==5.3.1
redis==3.5.3
semver==2.13.0
six==1.15.0
SQLAlchemy==1.3.20
tinydb==4.2.0
toml==0.10.1
transitions==0.8.4
uWSGI==2.0.19.1
vcversioner==2.16.0.0
vine==1.3.0
zope.interface==5.1.2

View File

@@ -19,6 +19,12 @@ echo \n
# pushd /usr/src
init_level_file=${CIC_DATA_DIR}/.init
if [ ! -f ${CIC_DATA_DIR}/.init ]; then
echo "Creating .init file..."
mkdir -p $CIC_DATA_DIR
touch /tmp/cic/config/.init
# touch $init_level_file
fi
echo -n 1 > $init_level_file
# Abort on any error (including if wait-for-it fails).
@@ -74,10 +80,10 @@ mkdir -p $CIC_DATA_DIR
cat << EOF > $CIC_DATA_DIR/.env
export CIC_REGISTRY_ADDRESS=$CIC_REGISTRY_ADDRESS
export CIC_TRUST_ADDRESS=$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
export CIC_DECLARATOR_ADDRESS=$CIC_DECLARATOR_ADDRESS
EOF
cat $CIC_DATA_DIR/envlist | bash from_env.sh > $CIC_DATA_DIR/.env_all
cat ./envlist | bash from_env.sh > $CIC_DATA_DIR/.env_all
# popd
set +a

View File

@@ -1,54 +1,35 @@
image: docker:19.03.13
image:
name: gcr.io/kaniko-project/executor:debug
entrypoint: [""]
variables:
# docker host
DOCKER_HOST: tcp://docker:2376
# container, thanks to volume mount from config.toml
DOCKER_TLS_CERTDIR: "/certs"
# These are usually specified by the entrypoint, however the
# Kubernetes executor doesn't run entrypoints
# https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4125
DOCKER_TLS_VERIFY: 1
DOCKER_CERT_PATH: "$DOCKER_TLS_CERTDIR/client"
# We are building these from the apps dir to easily share the requirements file there.
# It would be nicer to build from the app dir context. TODO figure out a nice way to do this in local DOCKER_TLS_VERIFY
CONTEXT: apps/
services:
- docker:19.03.13-dind
before_script:
- docker info
KANIKO_CACHE_ARGS: "--cache=true --cache-copy-layers=true --cache-ttl=24h"
CONTEXT: $CI_PROJECT_DIR/apps/
.py_build_merge_request:
stage: build
before_script:
- cd $CONTEXT
variables:
CI_DEBUG_TRACE: "true"
IMAGE_TAG: $APP_NAME:$CI_COMMIT_SHORT_SHA
- CI_DEBUG_TRACE: "true"
script:
- docker build -t $IMAGE_TAG -f $DOCKERFILE_PATH .
- mkdir -p /kaniko/.docker
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > "/kaniko/.docker/config.json"
- /kaniko/executor --context $CONTEXT --dockerfile $DOCKERFILE_PATH $KANIKO_CACHE_ARGS --cache-repo $CI_REGISTRY_IMAGE --no-push
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: always
.py_build_push:
stage: build
before_script:
- cd $CONTEXT
- echo "$CI_REGISTRY_PASSWORD" | docker login -u "$CI_REGISTRY_USER" $CI_REGISTRY --password-stdin
variables:
CI_DEBUG_TRACE: "true"
IMAGE_TAG_BASE: $CI_REGISTRY_IMAGE/$APP_NAME:$CI_COMMIT_BRANCH-$CI_COMMIT_SHORT_SHA
LATEST_TAG: $CI_REGISTRY_IMAGE/$APP_NAME:latest
script:
- export IMAGE_TAG="$IMAGE_TAG_BASE-$(date +%F.%H%M%S)"
- docker build -t $IMAGE_TAG -f $DOCKERFILE_PATH .
- docker push $IMAGE_TAG
- docker tag $IMAGE_TAG $LATEST_TAG
- docker push $LATEST_TAG
rules:
stage: build
variables:
IMAGE_TAG_BASE: $CI_REGISTRY_IMAGE/$APP_NAME:$CI_COMMIT_BRANCH-$CI_COMMIT_SHORT_SHA
LATEST_TAG: $CI_REGISTRY_IMAGE/$APP_NAME:latest
script:
- export IMAGE_TAG="$IMAGE_TAG_BASE-$(date +%F.%H%M%S)"
- mkdir -p /kaniko/.docker
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > "/kaniko/.docker/config.json"
# - /kaniko/executor --context $CONTEXT --dockerfile $DOCKERFILE_PATH $KANIKO_CACHE_ARGS --destination $IMAGE_TAG
- /kaniko/executor --context $CONTEXT --dockerfile $DOCKERFILE_PATH $KANIKO_CACHE_ARGS --destination $IMAGE_TAG --destination $CI_REGISTRY_IMAGE/$APP_NAME:latest
rules:
- if: $CI_COMMIT_BRANCH == "master"
when: always

View File

@@ -36,7 +36,7 @@ services:
restart: unless-stopped
ports:
- ${DEV_ETH_PORT_HTTP:-63545}:8545
- ${DEV_ETH_PORT_WS-63546}:8546
- ${DEV_ETH_PORT_WS:-63546}:8546
- 30303
volumes:
- ./apps/bloxbergValidatorSetup/keys:/root/keys # stores the signing key locally
@@ -54,6 +54,7 @@ services:
volumes:
- ./scripts/initdb/create_db.sql:/docker-entrypoint-initdb.d/1-create_all_db.sql
- ./apps/cic-meta/scripts/initdb/postgresql.sh:/docker-entrypoint-initdb.d/2-init-cic-meta.sh
- ./apps/cic-cache/db/psycopg2/db.sql:/docker-entrypoint-initdb.d/3-init-cic-meta.sql
- postgres-db:/var/lib/postgresql/data
redis:
@@ -79,24 +80,7 @@ services:
build:
context: apps/
dockerfile: contract-migration/docker/Dockerfile
environment:
# ETH_PROVIDER should be broken out into host/port but cic-eth expects this
ETH_PROVIDER: http://eth:8545
# And these two are for wait-for-it (could parse this)
ETH_PROVIDER_HOST: eth
ETH_PROVIDER_PORT: 8545
CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996}
CIC_DATA_DIR: ${CIC_DATA_DIR:-/tmp/cic/config}
command: ["./reset.sh"]
depends_on:
- eth
volumes:
- contract-config:/tmp/cic/config
seed-cic-eth:
build:
context: apps/
dockerfile: contract-migration/docker/Dockerfile
# image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/contract-migration:latest
environment:
# ETH_PROVIDER should be broken out into host/port but cic-eth expects this
ETH_PROVIDER: http://eth:8545
@@ -117,10 +101,8 @@ services:
CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis:6379}
CELERY_RESULT_URL: ${CELERY_RESULT_URL:-redis://redis:6379}
DEV_PIP_EXTRA_INDEX_URL: ${DEV_PIP_EXTRA_INDEX_URL:-https://pip.grassrootseconomics.net:8433}
command: ["./seed_cic_eth.sh"]
# deploy:
#restart_policy:
# condition: on-failure
RUN_LEVEL: 2 #0: noop 1: contract migrations 2: seed data
command: ["./run_job.sh"]
depends_on:
- eth
- postgres
@@ -129,7 +111,6 @@ services:
volumes:
- contract-config:/tmp/cic/config
cic-cache-tracker:
build:
context: apps
@@ -162,7 +143,43 @@ services:
- -c
- |
if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
/usr/local/bin/cic-cache-tracker -vv
/usr/local/bin/cic-cache-trackerd -vv
volumes:
- contract-config:/tmp/cic/config/:ro
cic-cache-tasker:
build:
context: apps
dockerfile: cic-cache/docker/Dockerfile
environment:
CIC_REGISTRY_ADDRESS: $CIC_REGISTRY_ADDRESS # supplied by the contract-config volume after contract provisioning
ETH_PROVIDER: ${ETH_PROVIDER:-http://eth:8545}
DATABASE_USER: ${DATABASE_USER:-grassroots}
DATABASE_PASSWORD: ${DATABASE_PASSWORD:-tralala} # this is set at initdb; see: postgres/initdb/create_db.sql
DATABASE_HOST: ${DATABASE_HOST:-postgres}
DATABASE_PORT: ${DATABASE_PORT:-5432}
DATABASE_NAME: ${DATABASE_NAME_CIC_CACHE:-cic_cache}
DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
DATABASE_DEBUG: 1
ETH_ABI_DIR: ${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi}
CIC_TRUST_ADDRESS: ${DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER:-0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C}
CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996}
CELERY_BROKER_URL: redis://redis:6379
CELERY_RESULT_URL: redis://redis:6379
deploy:
restart_policy:
condition: on-failure
depends_on:
- redis
- postgres
- eth
command:
- /bin/bash
- -c
- |
if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
/usr/local/bin/cic-cache-taskerd -vv
volumes:
- contract-config:/tmp/cic/config/:ro
@@ -192,7 +209,7 @@ services:
- |
if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
"/usr/local/bin/uwsgi" \
--wsgi-file /usr/src/cic-cache/cic_cache/runnable/server.py \
--wsgi-file /usr/src/cic-cache/cic_cache/runnable/serverd.py \
--http :8000 \
--pyargv -vv
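
uwsgi's --wsgi-file option loads the named file and, unless told otherwise, serves the module-level callable named application, so the renamed serverd.py is expected to expose one. A generic stub of that shape, purely as a sketch of the WSGI contract and not the actual cic-cache server:

# generic WSGI callable of the form uwsgi looks for via --wsgi-file;
# the real cic_cache server answers from the cache database instead
def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'application/json')])
    return [b'{"status": "ok"}']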