pop required params
parent b42f076d85
commit 6a0544b579
@@ -1,39 +1,30 @@
# standard imports
import json
import logging
import os
import sys
import uuid
from os import path
from urllib.parse import parse_qsl, urlparse

import redis
from cic_eth.api.api_task import Api
from cic_eth.server.celery_helper import call
from cic_eth.server.UWSGIOpenAPIRequest import UWSGIOpenAPIRequest
from openapi_core import create_spec
from openapi_core.validation.request.datatypes import OpenAPIRequest
from openapi_core.validation.request.validators import RequestValidator
from openapi_spec_validator.schemas import read_yaml_file
from werkzeug.wrappers import Request
from cic_eth.server.celery_helper import call

spec_dict = read_yaml_file(path.join(path.dirname(
    __file__), '../../server/openapi/server.yaml'))
spec = create_spec(spec_dict)


logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()


log = logging.getLogger()


# uwsgi application


def application(env, start_response):
    print(spec)
    request = Request(env)
    oAPIRequest = UWSGIOpenAPIRequest(request)
    oAPIRequest = UWSGIOpenAPIRequest(env)
    validator = RequestValidator(spec)
    result = validator.validate(oAPIRequest)

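The hunk above validates the incoming request against the OpenAPI spec but stops before showing how the result is used. A minimal sketch of how validation errors could be rejected inside application() before any dispatching, assuming openapi_core's result object exposes an errors list (this handling is illustrative, not part of the commit):

# Illustrative sketch, not part of this commit: reject invalid requests early.
if result.errors:
    content = json.dumps({'errors': [str(e) for e in result.errors]}).encode('utf-8')
    headers = [('Content-Type', 'application/json'),
               ('Content-Length', str(len(content)))]
    start_response('400 Bad Request', headers)
    return [content]
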
@@ -51,31 +42,62 @@ def application(env, start_response):
        return [content]
    parsed_url = urlparse(env.get('REQUEST_URI'))  # /api
    path = parsed_url.path
    parsed_url.params
    params = dict(parse_qsl(parsed_url.query))

    # Setup Channel to receive the result

    if path == '/list':
        address = params.get('address')
        print('address', address)
        # address, limit=10, external_task=None, external_queue=None
        data = call('list', address)
        address = params.pop('address')
        print('address', address, )
        # address, limit=10
        data = call('list', address, **params)

    elif path == '/balance':
        data = call('balance', **params)
        address = params.pop('address')
        token_symbol = params.pop('token_symbol')
        data = call('balance', address, token_symbol, **params)
        for b in data:
            b.update({
                "balance_available": int(b['balance_network']) + int(b['balance_incoming']) - int(b['balance_outgoing'])
            })
    elif path == '/create_account':
        data = call('create_account', **params)

    elif path == '/refill_gas':
        address = params.pop('address')
        data = call('refill_gas', address)

    elif path == '/ping':
        data = call('ping', **params)

    elif path == '/transfer':
        data = call('transfer', **params)
        from_address = params.pop('from_address')
        to_address = params.pop('to_address')
        value = params.pop('value')
        token_symbol = params.pop('token_symbol')

        data = call('transfer', from_address, to_address, value, token_symbol)

    elif path == '/transfer_from':
        data = call('transfer_from', **params)
        from_address = params.pop('from_address')
        to_address = params.pop('to_address')
        value = params.pop('value')
        token_symbol = params.pop('token_symbol')
        spender_address = params.pop('spender_address')
        data = call('transfer_from', from_address, to_address,
                    value, token_symbol, spender_address)

    elif path == '/token':
        data = call('token', **params)
        token_symbol = params.pop('token_symbol')
        data = call('token', token_symbol, **params)

    elif path == '/tokens':
        data = call('tokens', **params)
        token_symbols = params.pop('token_symbols')
        data = call('tokens', token_symbols, **params)

    elif path == '/default_token':
        data = call('default_token', )


    json_data = json.dumps(data)
    content = json_data.encode('utf-8')
    headers = []
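Every branch above now follows the same pattern: required parameters are popped out of the query dict and passed positionally, while whatever remains is forwarded as optional keyword arguments. A standalone sketch of that pattern (the dispatch helper and its error handling are illustrative assumptions, not code from this commit):

# Illustrative sketch of the pop-required-params pattern used above.
def dispatch(method, params, required):
    try:
        positional = [params.pop(name) for name in required]
    except KeyError as e:
        # A missing required parameter surfaces here instead of deep inside the api call.
        raise ValueError(f'missing required parameter: {e}')
    return call(method, *positional, **params)

# e.g. dispatch('balance', dict(parse_qsl(parsed_url.query)), ['address', 'token_symbol'])
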
@@ -9,19 +9,18 @@ from werkzeug.datastructures import ImmutableMultiDict

from openapi_core.validation.request.datatypes import OpenAPIRequest
from openapi_core.validation.request.datatypes import RequestParameters
from werkzeug.wrappers import Request


class UWSGIOpenAPIRequestFactory:
    @classmethod
    def create(cls, request):
    def create(cls, env):
        """
        Converts a requests request to an OpenAPI one

        Internally converts to a `PreparedRequest` first to parse the exact
        payload being sent
        """
        if isinstance(request, Request):
            request = request.prepare()
        request = Request(env)


        # Method
        method = request.method.lower()
@@ -44,7 +43,6 @@ class UWSGIOpenAPIRequestFactory:
        header = Headers(dict(request.headers))

        # Body
        # TODO: figure out if request._body_position is relevant
        body = request.get_data()

        # Path gets deduced by path finder against spec

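The factory now receives the raw WSGI environ and builds the werkzeug Request wrapper itself. A self-contained sketch of the pieces it reads from that wrapper, using werkzeug's own test helper to fabricate an environ (the path and query values are placeholders):

# Illustrative sketch: what the werkzeug Request wrapper exposes for a WSGI environ.
from werkzeug.test import create_environ
from werkzeug.wrappers import Request

env = create_environ(path='/balance', query_string='address=0x0&token_symbol=FOO', method='GET')
request = Request(env)
print(request.method.lower())      # 'get'
print(request.args.to_dict())      # {'address': '0x0', 'token_symbol': 'FOO'}
print(request.get_data())          # b''
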
@@ -3,28 +3,27 @@ import logging
import sys
import uuid

import cic_eth.cli
import redis
from cic_eth.api.api_task import Api
from cic_eth.server.config import config

log = logging.getLogger(__name__)
# TODO Remove
log.setLevel(logging.DEBUG)

celery_app = cic_eth.cli.CeleryApp.from_config(config)
celery_app.set_default()

chain_spec = config.get('CHAIN_SPEC')
celery_queue = config.get('CELERY_QUEUE')

api = Api(
    chain_spec,
    queue=celery_queue,
)


redis_host = config.get('REDIS_HOST')
redis_port = config.get('REDIS_PORT')
redis_db = config.get('REDIS_DB')


def call(method, *args):
def call(method, *args, **kwargs):
    """ Creates a redis channel and calls `cic_eth.api` with the provided `method` and `*args`. Returns the result of the api call
    """
    redis_channel = str(uuid.uuid4())
@@ -40,15 +39,16 @@ def call(method, *args):
        callback_queue=celery_queue,
    )
    getattr(api, method)(*args)

    ps.get_message()
    try:
        o = ps.get_message(timeout=config.get('REDIS_TIMEOUT'))
    except TimeoutError as e:
        sys.stderr.write(
            f'cic_eth.api.{method}({args}) timed out:\n {e}')
            f'cic_eth.api.{method}({args}, {kwargs}) timed out:\n {e}')
        sys.exit(1)

    log.debug(f"cic_eth.api.{method}({args})\n {o}")
    log.debug(f"cic_eth.api.{method}({args}, {kwargs})\n {o}")

    ps.unsubscribe()
    try:
@@ -59,4 +59,3 @@ def call(method, *args):
        sys.stderr.write(
            f'Unable to parse Data:\n{o}\n Error:\n{e}')
        sys.exit(1)

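With **kwargs added to the signature, optional query parameters such as limit reach the helper the same way the /list branch passes them. A short usage sketch (the address is a placeholder and a running cic-eth stack with its redis callback backend is assumed):

# Placeholder address; assumes a running cic-eth stack and its redis callback backend.
from cic_eth.server.celery_helper import call

txs = call('list', '0x0000000000000000000000000000000000000000', limit=10)
print(txs)
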
@@ -6,4 +6,4 @@ argparser = cic_eth.cli.ArgumentParser(arg_flags)

argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)

@@ -208,6 +208,8 @@ services:
    restart: unless-stopped
    depends_on:
      - cic-eth-tasker
      - cic-eth-tracker
      - cic-eth-dispatcher
    volumes:
      - signer-data:/run/crypto-dev-signer
      - contract-config:/tmp/cic/config/:ro
@@ -219,9 +221,9 @@ services:
      if [[ -f /tmp/cic/config/env_reset ]]; then source /tmp/cic/config/env_reset; fi
      set +a
      /usr/local/bin/uwsgi \
        --wsgi-file /root/cic_eth/runnable/daemons/server.py \
        --http :5000 \
        --pyargv "-vv"
        --wsgi-file /root/cic_eth/runnable/daemons/server.py \
        --http :5000 \
        --pyargv "-vv"

  cic-eth-tracker:
    image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics/cic-internal-integration}/cic-eth:${TAG:-latest}
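With the WSGI app served by uwsgi on port 5000, the routes in server.py can be exercised over plain HTTP. A minimal client sketch using only the standard library (the localhost port mapping, address, and token symbol are assumptions about the local compose setup):

# Assumes the compose service publishes port 5000 on localhost; values are placeholders.
import json
from urllib.parse import urlencode
from urllib.request import urlopen

query = urlencode({
    'address': '0x0000000000000000000000000000000000000000',
    'token_symbol': 'GFT',
})
with urlopen(f'http://localhost:5000/balance?{query}') as response:
    print(json.loads(response.read()))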