philip/demurrage-token-deploy #1

Closed
mango-habanero wants to merge 4 commits from philip/demurrage-token-deploy into master
28 changed files with 1529 additions and 316 deletions
Showing only changes of commit fb3253ae55 - Show all commits

402
.pylintrc Normal file
View File

@ -0,0 +1,402 @@
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=third_party
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=object_detection_grpc_client.py,prediction_pb2.py,prediction_pb2_grpc.py
# Pickle collected data for later comparisons.
persistent=no
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Use multiple processes to speed up Pylint.
jobs=4
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
#
# Kubeflow disables string-interpolation because we are starting to use f
# style strings
disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,missing-docstring,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,relative-import,invalid-name,bad-continuation,no-member,locally-disabled,fixme,import-error,too-many-locals,no-name-in-module,too-many-instance-attributes,no-self-use,logging-fstring-interpolation
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]". This option is deprecated
# and it will be removed in Pylint 2.0.
files-output=no
# Tells whether to display a full report or only the messages
reports=no
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables error, warning, refactor, convention and statement, which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[BASIC]
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for function names
function-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for variable names
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for attribute names
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for argument names
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for method names
method-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
[ELIF]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=140
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
# Use 2 spaces consistent with TensorFlow style.
indent-string=' '
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make this work,
# install the python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
[DESIGN]
# Maximum number of arguments for function / method
max-args=7
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=0
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception

101
cic/MetaRequestHandler.py Normal file
View File

@ -0,0 +1,101 @@
from __future__ import annotations
# standard imports
import json
import logging
import os
from typing import TYPE_CHECKING, Dict, Union
from cic_types.condiments import MetadataPointer
from cic_types.ext.metadata.signer import Signer
from cic_types.processor import generate_metadata_pointer
if TYPE_CHECKING:
from cic.cmd.arg import CmdCtrl
from cic.http import HTTPSession
# local imports
# external imports
# module logger; NOTE(review): keyed by __file__ (a path) rather than the
# conventional __name__ — confirm this naming is intended.
logg = logging.getLogger(__file__)
class Metadata:
    """Shared static configuration for metadata request handlers.

    :cvar base_url: The base URL of the metadata server.
    :type base_url: str
    """

    # base URL of the metadata server; assigned by the deploy entry point
    base_url = None
    # command controller supplying remote openers; assigned by the deploy entry point
    ctrl: CmdCtrl = None


class MetadataRequestsHandler(Metadata):
    """Create, sign, update and query metadata records on the metadata server."""

    def __init__(
        self,
        cic_type: MetadataPointer,
        identifier: bytes,
        engine: str = "pgp",
    ):
        """Prepare the HTTP request context for a single metadata record.

        :param cic_type: metadata pointer type; ``MetadataPointer.NONE`` means
            the identifier is used verbatim (hex-encoded) as the pointer.
        :param identifier: raw identifier bytes for the record.
        :param engine: signing engine label embedded in signed submissions.
        """
        logg.debug(f"ctrl: {self.ctrl}")
        # opener is the authenticated HTTP session registered under "meta"
        self.opener: HTTPSession = self.ctrl.remote_openers["meta"]
        self.cic_type = cic_type
        self.engine = engine
        self.headers = {"X-CIC-AUTOMERGE": "server", "Content-Type": "application/json"}
        self.identifier = identifier
        if cic_type == MetadataPointer.NONE:
            self.metadata_pointer = identifier.hex()
        else:
            self.metadata_pointer = generate_metadata_pointer(
                identifier=self.identifier, cic_type=self.cic_type
            )
        if self.base_url:
            self.url = os.path.join(self.base_url, self.metadata_pointer)

    def create(self, data: Union[Dict, str]):
        """POST a new metadata record, then submit the server's merged copy
        back as a signed edit.

        :param data: JSON-serializable record content.
        :return: server response from the follow-up signed PUT.
        """
        data = json.dumps(data).encode("utf-8")
        result = self.opener.open(
            method="POST", url=self.url, data=data, headers=self.headers
        )
        logg.debug(
            f"url: {self.url}, data: {data}, headers: {self.headers}, result: {result}"
        )
        metadata = json.loads(result)
        return self.edit(data=metadata)

    def edit(self, data: Union[Dict, str]):
        """PUT a signed envelope for an existing metadata record.

        :param data: record content as returned by the server; its "digest"
            key is echoed into the signature envelope.
        :return: raw server response.
        """
        cic_meta_signer = Signer()
        signature = cic_meta_signer.sign_digest(data=data)
        algorithm = cic_meta_signer.get_operational_key().get("algo")
        formatted_data = {
            "m": json.dumps(data),
            "s": {
                "engine": self.engine,
                "algo": algorithm,
                "data": signature,
                "digest": data.get("digest"),
            },
        }
        formatted_data = json.dumps(formatted_data).encode("utf-8")
        result = self.opener.open(
            method="PUT", url=self.url, data=formatted_data, headers=self.headers
        )
        logg.info(f"signed metadata submission returned: {result}.")
        # (removed dead code: a decoded copy of self.identifier was computed
        # here but never used)
        return result

    def query(self):
        """GET the metadata record and validate that it parses to a JSON object.

        :return: raw (undecoded) server response body.
        :raises ValueError: if the response is valid JSON but not an object.
        """
        result = self.opener.open(method="GET", url=self.url)
        result_data = json.loads(result)
        if not isinstance(result_data, dict):
            raise ValueError(f"invalid result data object: {result_data}.")
        return result

View File

@ -1,32 +1,26 @@
# standard imports from __future__ import annotations
import logging
import importlib
import os
# standard imports
import importlib
import logging
import os
from typing import TYPE_CHECKING
# local imports
from cic import Processor, Proof
from cic.attachment import Attachment
from cic.meta import Meta, MetadataWriter
from cic.network import Network
from cic.output import HTTPWriter, KeyedWriterFactory
from cic.token import Token
# external imports # external imports
from cic_types.ext.metadata import MetadataRequestsHandler from cic.MetaRequestHandler import MetadataRequestsHandler
from cic_types.ext.metadata.signer import Signer as MetadataSigner from cic_types.ext.metadata.signer import Signer as MetadataSigner
# local imports
from cic import (
Proof,
Processor,
)
from cic.output import (
HTTPWriter,
KeyedWriterFactory,
)
from cic.meta import (
Meta,
MetadataWriter,
)
from cic.attachment import Attachment
from cic.network import Network
from cic.token import Token
from typing import Optional
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
if TYPE_CHECKING:
from cic.cmd.arg import CmdCtrl
from cic.actions.types import Options
@ -47,19 +41,25 @@ def init_writers_from_config(config):
return w return w
def deploy(config, target: str,contract_directory: str, metadata_endpoint: Optional[str], keystore_directory: str, key_file_path: str, gpg_passphrase: str): def deploy(ctrl: CmdCtrl, target: str, contract_directory: str, keystore_directory: str, options: Options):
modname = 'cic.ext.{}'.format(target) auth_passphrase=options.auth_passphrase,
auth_key_file_path=options.auth_keyfile_path,
metadata_endpoint=options.metadata_endpoint,
modname = f'cic.ext.{target}'
cmd_mod = importlib.import_module(modname) cmd_mod = importlib.import_module(modname)
writers = init_writers_from_config(config) writers = init_writers_from_config(ctrl.config)
output_directory = os.path.join(contract_directory, 'out') output_directory = os.path.join(contract_directory, 'out')
output_writer_path_meta = output_directory output_writer_path_meta = output_directory
if metadata_endpoint != None: if metadata_endpoint != None:
MetadataRequestsHandler.base_url = metadata_endpoint MetadataRequestsHandler.base_url = metadata_endpoint
MetadataSigner.gpg_path = os.path.join('/tmp') MetadataRequestsHandler.ctrl = ctrl
MetadataSigner.key_file_path = key_file_path
MetadataSigner.gpg_passphrase = gpg_passphrase MetadataSigner.gpg_path = '/tmp'
writers['proof'] = KeyedWriterFactory(MetadataWriter, HTTPWriter).new MetadataSigner.key_file_path = auth_key_file_path # This is a p2p key for add data to meta
MetadataSigner.gpg_passphrase = auth_passphrase
writers['proof'] = KeyedWriterFactory(MetadataWriter, None).new
writers['attachment'] = KeyedWriterFactory(None, HTTPWriter).new writers['attachment'] = KeyedWriterFactory(None, HTTPWriter).new
writers['meta'] = MetadataWriter writers['meta'] = MetadataWriter
output_writer_path_meta = metadata_endpoint output_writer_path_meta = metadata_endpoint
@ -78,18 +78,18 @@ def deploy(config, target: str,contract_directory: str, metadata_endpoint: Optio
chain_spec = None chain_spec = None
try: try:
chain_spec = config.get('CHAIN_SPEC') chain_spec = ctrl.config.get('CHAIN_SPEC')
except KeyError: except KeyError:
chain_spec = cn.chain_spec chain_spec = cn.chain_spec
config.add(chain_spec, 'CHAIN_SPEC', exists_ok=True) ctrl.config.add(chain_spec, 'CHAIN_SPEC', exists_ok=True)
logg.debug(f'CHAIN_SPEC config set to {str(chain_spec)}') logg.debug(f'CHAIN_SPEC config set to {str(chain_spec)}')
(rpc, signer) = cmd_mod.parse_adapter(config, keystore_directory) (rpc, signer) = cmd_mod.parse_adapter(ctrl.config, keystore_directory)
ref = cn.resource(target) target_network_reference = cn.resource(target)
chain_spec = cn.chain_spec(target) chain_spec = cn.chain_spec(target)
logg.debug('found reference {} chain spec {} for target {}'.format(ref['contents'], chain_spec, target)) logg.debug(f'found reference {target_network_reference["contents"]} chain spec {chain_spec} for target {target}')
c = getattr(cmd_mod, 'new')(chain_spec, ref['contents'], cp, signer_hint=signer, rpc=rpc, outputs_writer=writers['ext'](path=output_directory)) c = getattr(cmd_mod, 'new')(chain_spec, target_network_reference['contents'], cp, signer_hint=signer, rpc=rpc, outputs_writer=writers['ext'](path=output_directory))
c.apply_token(ct) c.apply_token(ct)
p = Processor(proof=cp, attachment=ca, metadata=cm, extensions=[c]) p = Processor(proof=cp, attachment=ca, metadata=cm, extensions=[c])

28
cic/actions/types.py Normal file
View File

@ -0,0 +1,28 @@
"""Immutable value objects passed between the cic deploy commands."""
from collections import namedtuple

# The five contract sub-components handled during a deployment.
Contract = namedtuple("Contract", ["token", "proof", "meta", "attachment", "network"])

# Command-line / configuration options threaded through the deploy flow.
Options = namedtuple(
    "Options",
    [
        "auth_db_path",
        "auth_keyfile_path",
        "auth_passphrase",
        "contract_registry",
        "key_account",
        "chain_spec",
        "rpc_provider",
        "metadata_endpoint",
        "wallet_keyfile",
        "wallet_passphrase",
    ],
)

74
cic/auth.py Normal file
View File

@ -0,0 +1,74 @@
# standard imports
import hashlib
import logging
import os
# external imports
import gnupg
# local imports
from cic.errors import AuthError
logg = logging.getLogger(__name__)
class PGPAuthCrypt:
    """GnuPG-backed authentication helper.

    Derives and caches a local encryption secret bound to *auth_key*, and
    signs payloads with the corresponding PGP key.
    """

    # implementation type label checked by the controller (AUTH_TYPE)
    typ = "gnupg"

    def __init__(self, db_dir, auth_key, pgp_dir=None):
        """
        :param db_dir: directory holding the local ".secret" file.
        :param auth_key: PGP key fingerprint as a hex string.
        :param pgp_dir: optional GnuPG home directory.
        :raises AuthError: if *auth_key* is not a valid hex string.
        """
        self.db_dir = db_dir
        try:
            bytes.fromhex(auth_key)
        except (TypeError, ValueError) as err:
            # chain from the caught instance (not the exception class) so the
            # traceback carries the actual failure
            raise AuthError(f"invalid key {auth_key}") from err
        self.auth_key = auth_key
        self.gpg = gnupg.GPG(gnupghome=pgp_dir)
        self.secret = None
        self.__passphrase = None

    def get_secret(self, passphrase=""):
        """Load and decrypt the local secret, creating it on first use.

        The secret is sha256(auth_key bytes || passphrase), encrypted to the
        auth key and stored in ``<db_dir>/.secret``.

        :param passphrase: passphrase protecting the secret; None is
            treated as the empty string.
        :raises AuthError: if the secret cannot be encrypted or decrypted.
        """
        if passphrase is None:
            passphrase = ""
        p = os.path.join(self.db_dir, ".secret")
        try:
            f = open(p, "rb")
        except FileNotFoundError as err:
            # first use: derive the secret and persist it encrypted
            h = hashlib.sha256()
            h.update(bytes.fromhex(self.auth_key))
            h.update(passphrase.encode("utf-8"))
            z = h.digest()
            secret = self.gpg.encrypt(z, [self.auth_key], always_trust=True)
            if not secret.ok:
                raise AuthError(
                    f"could not encrypt secret for {self.auth_key}"
                ) from err
            os.makedirs(os.path.dirname(p), exist_ok=True)
            with open(p, "wb") as out:
                out.write(secret.data)
            f = open(p, "rb")
        try:
            secret = self.gpg.decrypt_file(f, passphrase=passphrase)
        finally:
            # close even when decryption fails (the original leaked the
            # handle on the error path)
            f.close()
        if not secret.ok:
            raise AuthError("could not decrypt encryption secret. wrong password?")
        self.secret = secret.data
        self.__passphrase = passphrase

    def get_passphrase(self):
        """Return the passphrase last passed to :meth:`get_secret`."""
        return self.__passphrase

    def fingerprint(self):
        """Return the auth key fingerprint."""
        return self.auth_key

    def sign(self, plaintext, encoding, passphrase="", detach=True):
        """Sign *plaintext* with the auth key.

        :param encoding: if "base64", return the raw signature bytes;
            otherwise the gnupg sign-result object is returned.
            NOTE(review): no base64 encoding is actually applied here —
            confirm callers expect raw bytes for this mode.
        :raises AuthError: if signing produced no data.
        """
        r = self.gpg.sign(plaintext, passphrase=passphrase, detach=detach)
        if len(r.data) == 0:
            raise AuthError("signing failed: " + r.status)
        if encoding == "base64":
            r = r.data
        return r

View File

@ -0,0 +1 @@
from cic.cmd.arg import CmdCtrl

221
cic/cmd/arg.py Normal file
View File

@ -0,0 +1,221 @@
# standard imports
import importlib
import logging
import os
import sys
# external imports
import chainlib.eth.cli
import cic.cmd.easy as cmd_easy
import cic.cmd.export as cmd_export
import cic.cmd.ext as cmd_ext
import cic.cmd.init as cmd_init
import cic.cmd.show as cmd_show
from chainlib.chain import ChainSpec
from cic.auth import PGPAuthCrypt
from cic.crypt.aes import AESCTREncrypt
from cic.http import HTTPSession, PGPClientSession
# local imports
from cic.notify import NotifyWriter
# shared interactive notification writer, used when logging output is quiet
notifier = NotifyWriter()
logg = logging.getLogger(__name__)
# locate the packaged default configuration relative to this file
script_dir = os.path.dirname(os.path.realpath(__file__))
data_dir = os.path.join(script_dir, "..", "data")
base_config_dir = os.path.join(data_dir, "config")
class NullWriter:
    """Fallback output writer: drops notifications, echoes writes to stdout."""

    def notify(self, v):
        """Ignore progress notification *v*."""
        return None

    def ouch(self, v):
        """Ignore error notification *v*."""
        return None

    def write(self, v):
        """Write the string form of *v* to standard output."""
        sys.stdout.write(str(v))
class CmdCtrl:
    """Top-level command controller for the cic CLI.

    Parses command-line arguments, resolves the subcommand module, loads
    layered configuration, sets up notification output, authentication and
    the blockchain RPC connection, and exposes helpers the subcommand
    modules use during execution.
    """

    # one-letter aliases accepted for subcommand names
    __cmd_alias = {
        "u": "user",
        "t": "tag",
    }
    # subcommands that require an authenticated session
    __auth_for = [
        "user",
    ]

    def __init__(self, *_args, argv=None, _description=None, logger=None, **_kwargs):
        # initialization order matters: each step reads state set by earlier ones
        self.args(argv)
        self.logging(logger)
        self.module()
        self.load_config()
        self.notifier()
        self.auth()
        self.blockchain()
        self.remote_openers = {}
        if self.get("META_URL") is not None:
            auth_client_session = PGPClientSession(self.__auth)
            self.remote_openers["meta"] = HTTPSession(
                self.get("META_URL"),
                auth=auth_client_session,
                origin=self.config.get("META_HTTP_ORIGIN"),
            )

    def blockchain(self):
        """Derive the chain spec from config and open the RPC connection."""
        self.chain_spec = ChainSpec.from_chain_str(self.config.get("CHAIN_SPEC"))
        self.rpc = chainlib.eth.cli.Rpc()
        self.__conn = self.rpc.connect_by_config(self.config)

    def args(self, argv):
        """Build the argument parser with all subcommands and parse *argv*."""
        self.argparser = chainlib.eth.cli.ArgumentParser(
            chainlib.eth.cli.argflag_std_read
        )
        sub = self.argparser.add_subparsers()
        sub.dest = "command"
        sub_init = sub.add_parser("init", help="initialize new cic data directory")
        cmd_init.process_args(sub_init)
        sub_show = sub.add_parser(
            "show", help="display summary of current state of cic data directory"
        )
        cmd_show.process_args(sub_show)
        sub_export = sub.add_parser(
            "export", help="export cic data directory state to a specified target"
        )
        cmd_export.process_args(sub_export)
        sub_ext = sub.add_parser("ext", help="extension helpers")
        cmd_ext.process_args(sub_ext)
        sub_easy = sub.add_parser("easy", help="Easy Mode Contract Deployment")
        cmd_easy.process_args(sub_easy)
        self.cmd_args = self.argparser.parse_args(argv)

    def module(self):
        """Resolve the subcommand name (expanding aliases) and import its module."""
        self.cmd_string = self.cmd_args.command
        cmd_string_translate = self.__cmd_alias.get(self.cmd_string)
        if cmd_string_translate is not None:
            self.cmd_string = cmd_string_translate
        if self.cmd_string is None:
            # no subcommand given: show usage and bail out
            self.cmd_string = "none"
            self.argparser.print_help()
            sys.exit(1)
        modname = f"cic.cmd.{self.cmd_string}"
        self.logger.debug(f"using module {modname}")
        self.cmd_mod = importlib.import_module(modname)

    def logging(self, logger):
        """Attach *logger* (or the root logger) and apply -v/-vv verbosity."""
        self.logger = logger
        if self.logger is None:
            self.logger = logging.getLogger()
        if self.cmd_args.vv:
            self.logger.setLevel(logging.DEBUG)
        elif self.cmd_args.v:
            self.logger.setLevel(logging.INFO)

    def load_config(self):
        """Load config from the base dir, an optional user override dir and
        the subcommand module's extra argument mapping."""
        override_dir = self.cmd_args.config
        if override_dir is None:
            p = os.environ.get("HOME")
            if p is not None:
                p = os.path.join(p, ".config", "cic", "cli")
                try:
                    os.stat(p)
                    override_dir = p
                    logg.info(
                        f"applying user config override from standard location: {p}"
                    )
                except FileNotFoundError:
                    pass
        extra_args = self.cmd_mod.extra_args()
        self.config = chainlib.eth.cli.Config.from_args(
            self.cmd_args,
            base_config_dir=base_config_dir,
            extra_args=extra_args,
            default_config_dir=override_dir,
        )
        self.config.add(False, "_SEQ")
        self.config.censor("AUTH_PASSPHRASE")
        self.logger.debug(f"loaded config:\n{self.config}")

    def auth(self):
        """Set up PGP-based authentication and the derived AES encrypter.

        :raises NotImplementedError: if AUTH_TYPE is not "gnupg".
        """
        typ = self.get("AUTH_TYPE")
        if typ != "gnupg":
            raise NotImplementedError("Valid auth implementations are: gnupg")
        default_auth_db_path = None
        if os.environ.get("HOME") is not None:
            default_auth_db_path = os.path.join(
                os.environ["HOME"], ".local/share/cic/clicada"
            )
        auth_db_path = self.get("AUTH_DB_PATH", default_auth_db_path)
        self.__auth = PGPAuthCrypt(
            auth_db_path, self.get("AUTH_KEY"), self.get("AUTH_KEYRING_PATH")
        )
        self.__auth.get_secret(self.get("AUTH_PASSPHRASE"))
        self.encrypter = AESCTREncrypt(auth_db_path, self.__auth.secret)
        logg.debug(f"loaded auth: {self.__auth}")
        # SECURITY: AUTH_PASSPHRASE is deliberately NOT logged here — the
        # config censors it, and writing the secret to debug logs would leak it.
        logg.debug(f"AUTH_KEY: {self.get('AUTH_KEY')}")
        logg.debug(f"AUTH_DB_PATH: {self.get('AUTH_DB_PATH')}")
        logg.debug(f"AUTH_KEYRING_PATH: {self.get('AUTH_KEYRING_PATH')}")

    def get(self, k, default=None):
        """Read config key *k*; boolean-flag keys are coerced to bool.

        :param k: configuration key.
        :param default: value returned when the key is unset.
        """
        r = self.config.get(k, default)
        if k in [
            "_FORCE",
        ]:
            if r is None:
                return False
            return self.config.true(k)
        return r

    def chain(self):
        """Return the active chain spec."""
        return self.chain_spec

    def conn(self):
        """Return the open RPC connection."""
        return self.__conn

    def execute(self):
        """Run the selected subcommand module with this controller."""
        self.cmd_mod.execute(self)

    def opener(self, k):
        """Return the remote opener registered under *k* (e.g. "meta")."""
        return self.remote_openers[k]

    def notifier(self):
        """Pick the output writer: the interactive notifier when logging is
        quiet (WARNING or above), the null writer otherwise."""
        if logg.root.level >= logging.WARNING:
            logging.disable()
            self.writer = notifier
        else:
            self.writer = NullWriter()

    def notify(self, v):
        """Emit a transient progress notification."""
        self.writer.notify(v)

    def ouch(self, v):
        """Emit an error notification followed by a newline."""
        self.writer.ouch(v)
        print()

    def write(self, v):
        """Write *v* as final output, clearing the notification line first."""
        self.writer.write("")
        self.writer.write(v)
        print()

View File

@ -1,9 +1,11 @@
from __future__ import annotations
# standard import # standard import
import importlib import importlib
import json import json
import logging import logging
import os import os
import subprocess from typing import TYPE_CHECKING
import requests import requests
@ -18,6 +20,9 @@ from cic.meta import Meta
from cic.network import Network from cic.network import Network
from cic.token import Token from cic.token import Token
if TYPE_CHECKING:
from cic.cmd.arg import CmdCtrl
from cic.actions.types import Options, Contract
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -47,7 +52,17 @@ def process_args(argparser):
) )
def validate_args(args): def extra_args():
return {
"path": "_TOKEN_PATH",
"skip_gen": "_TOKEN_SKIP_GEN",
"skip_deploy": "_TOKEN_SKIP_DEPLOY",
"target": "_TOKEN_TARGET",
"p": "RPC_PROVIDER",
}
def validate_args(_args):
pass pass
@ -56,18 +71,6 @@ CONTRACTS = [
"url": "https://gitlab.com/cicnet/eth-erc20/-/raw/master/python/giftable_erc20_token/data/GiftableToken", "url": "https://gitlab.com/cicnet/eth-erc20/-/raw/master/python/giftable_erc20_token/data/GiftableToken",
"name": "Giftable Token", "name": "Giftable Token",
}, },
{
"url": "https://gitlab.com/cicnet/erc20-demurrage-token/-/raw/master/python/erc20_demurrage_token/data/DemurrageTokenMultiCap",
"name": "Demurrage Token Multi Cap (Might not work)",
},
{
"url": "https://gitlab.com/cicnet/erc20-demurrage-token/-/raw/master/python/erc20_demurrage_token/data/DemurrageTokenMultiNocap",
"name": "Demurrage Token Multi No Cap (Might not work)",
},
{
"url": "https://gitlab.com/cicnet/erc20-demurrage-token/-/raw/master/python/erc20_demurrage_token/data/DemurrageTokenSingleCap",
"name": "Demurrage Token Single Cap (Might not work)",
},
{ {
"url": "https://gitlab.com/cicnet/erc20-demurrage-token/-/raw/master/python/erc20_demurrage_token/data/DemurrageTokenSingleNocap", "url": "https://gitlab.com/cicnet/erc20-demurrage-token/-/raw/master/python/erc20_demurrage_token/data/DemurrageTokenSingleNocap",
"name": "Demurrage Token Single No Cap", "name": "Demurrage Token Single No Cap",
@ -95,8 +98,8 @@ def get_contract_args(data: list):
def print_contract_args(json_path: str): def print_contract_args(json_path: str):
json_data = json.load(open(json_path)) json_data = json.load(open(json_path, encoding="utf-8"))
print(f"Contract Args:") print("Contract Args:")
for contract_arg in get_contract_args(json_data): for contract_arg in get_contract_args(json_data):
print( print(
f"\t{contract_arg.get('name', '<no name>')} - {contract_arg.get('type', '<no type>')}" f"\t{contract_arg.get('name', '<no name>')} - {contract_arg.get('type', '<no type>')}"
@ -104,8 +107,8 @@ def print_contract_args(json_path: str):
def select_contract(): def select_contract():
print(f"Contracts:") print("Contracts:")
print(f"\t C - Custom (path/url to contract)") print("\t C - Custom (path/url to contract)")
for idx, contract in enumerate(CONTRACTS): for idx, contract in enumerate(CONTRACTS):
print(f"\t {idx} - {contract['name']}") print(f"\t {idx} - {contract['name']}")
@ -129,41 +132,40 @@ def select_contract():
json_path = possible_json_path json_path = possible_json_path
# possible_bin_location is url # possible_bin_location is url
else: else:
bin_path = download_file(contract["url"] + ".bin", directory) bin_path = download_file(possible_bin_location, directory)
else: else:
print("Invalid selection") print("Invalid selection")
exit(1) exit(1)
extra_args = [] contract_extra_args = []
extra_args_types = [] contract_extra_args_types = []
if os.path.exists(json_path): if os.path.exists(json_path):
json_data = json.load(open(json_path)) json_data = json.load(open(json_path, encoding="utf-8"))
for contract_arg in get_contract_args(json_data): for contract_arg in get_contract_args(json_data):
arg_name = contract_arg.get("name") arg_name = contract_arg.get("name")
arg_type = contract_arg.get("type") arg_type = contract_arg.get("type")
if arg_name not in ["_decimals", "_name", "_symbol"]: if arg_name not in ["_decimals", "_name", "_symbol"]:
val = input(f"Enter value for {arg_name} ({arg_type}): ") val = input(f"Enter value for {arg_name} ({arg_type}): ")
extra_args.append(val) contract_extra_args.append(val)
extra_args_types.append(arg_type) if arg_type == "uint128":
contract_extra_args_types.append("uint256")
else:
contract_extra_args_types.append(arg_type)
return { return {
"bin_path": bin_path, "bin_path": bin_path,
"json_path": json_path, "json_path": json_path,
"extra_args": extra_args, "extra_args": contract_extra_args,
"extra_args_types": extra_args_types, "extra_args_types": contract_extra_args_types,
} }
def init_token( def init_token(directory: str, code=""):
directory: str,
code="",
extra_args=[],
extra_args_types=[],
):
contract = select_contract() contract = select_contract()
code = contract["bin_path"] code = contract["bin_path"]
extra_args = contract["extra_args"] contract_extra_args = contract["extra_args"]
extra_args_types = contract["extra_args_types"] contract_extra_args_types = contract["extra_args_types"]
name = input("Enter Token Name (Foo Token): ") or "Foo Token" name = input("Enter Token Name (Foo Token): ") or "Foo Token"
symbol = input("Enter Token Symbol (FOO): ") or "FOO" symbol = input("Enter Token Symbol (FOO): ") or "FOO"
@ -175,8 +177,8 @@ def init_token(
name=name, name=name,
symbol=symbol, symbol=symbol,
precision=precision, precision=precision,
extra_args=extra_args, extra_args=contract_extra_args,
extra_args_types=extra_args_types, extra_args_types=contract_extra_args_types,
supply=supply, supply=supply,
code=code, code=code,
) )
@ -230,8 +232,30 @@ def init_attachment(directory):
return contract_attchment return contract_attchment
def load_contract(directory) -> Contract:
    """Load a previously generated token deployment from *directory*.

    Each component (token, proof, meta, attachment, network) reads its own
    persisted state from the directory, then the parts are assembled into a
    single Contract aggregate.
    """
    parts = {
        "token": Token(path=directory),
        "proof": Proof(path=directory),
        "meta": Meta(path=directory),
        "attachment": Attachment(path=directory),
        "network": Network(directory),
    }
    # Load in the same order the parts were constructed.
    for part in parts.values():
        part.load()
    return Contract(**parts)
def init_network( def init_network(
directory, registry_address,key_account_address, chain_spec, rpc_provider, targets=["eth"] directory,
options: Options,
targets=["eth"],
): ):
contract_network = Network(directory, targets=targets) contract_network = Network(directory, targets=targets)
contract_network.start() contract_network.start()
@ -240,17 +264,19 @@ def init_network(
m = importlib.import_module(f"cic.ext.{target}.start") m = importlib.import_module(f"cic.ext.{target}.start")
m.extension_start( m.extension_start(
contract_network, contract_network,
registry_address=registry_address, registry_address=options.contract_registry,
chain_spec=chain_spec, chain_spec=options.chain_spec,
rpc_provider=rpc_provider, rpc_provider=options.rpc_provider,
key_account_address=key_account_address key_account_address=options.key_account,
) )
contract_network.load() contract_network.load()
return contract_network return contract_network
def execute(config, eargs):
directory = eargs.path
def generate(directory: str, target: str, options: Options) -> Contract:
if os.path.exists(directory): if os.path.exists(directory):
contine = input( contine = input(
"Directory already exists, Would you like to delete it? (y/n): " "Directory already exists, Would you like to delete it? (y/n): "
@ -261,37 +287,55 @@ def execute(config, eargs):
else: else:
print(f"Deleted {directory}") print(f"Deleted {directory}")
os.system(f"rm -rf {directory}") os.system(f"rm -rf {directory}")
target = eargs.target
if not eargs.skip_gen:
os.makedirs(directory) os.makedirs(directory)
token = init_token(directory)
proof = init_proof(directory)
meta = init_meta(directory)
attachment = init_attachment(directory)
network = init_network(
directory,
options,
targets=[target],
)
return Contract(
token=token, proof=proof, meta=meta, attachment=attachment, network=network
)
def get_options(ctrl: CmdCtrl) -> Options:
# Defaults # Defaults
default_contract_registry = config.get( default_contract_registry = ctrl.config.get(
"CIC_CONTRACT_REGISTRY_ADDRESS", "CIC_REGISTRY_ADDRESS",
"0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299", "0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299", # Comes from /home/will/grassroots/cic-staff-installer/var/cic-staff-client/CIC_REGISTRY_ADDRESS
) )
default_key_account = config.get( default_key_account = ctrl.config.get(
"CIC_KEY_ACCOUNT_ADDRESS", "eb3907ecad74a0013c259d5874ae7f22dcbcc95c" "AUTH_KEY",
"eb3907ecad74a0013c259d5874ae7f22dcbcc95c", # comes from wallet `eth-keyfile -z -d $WALLET_KEY_FILE`
) )
default_metadata_endpoint = "http://localhost:63380" or config.get( # https://meta.grassrootseconomics.net
"META_ENDPOINT", "http://localhost:63380" # https://auth.grassrootseconomics.net Authenticated Meta
)
default_wallet_keyfile = config.get( default_metadata_endpoint = ctrl.config.get("META_URL", "https://auth.grassecon.net")
# Keyring folder needs to be dumped out as a private key file from $HOME/.config/cic/staff-client/.gnupg
default_wallet_keyfile = ctrl.config.get(
"WALLET_KEY_FILE", "WALLET_KEY_FILE",
"/home/will/grassroots/cic-internal-integration/apps/cic-ussd/tests/data/pgp/privatekeys_meta.asc", "/home/will/grassroots/cic-internal-integration/apps/cic-ussd/tests/data/pgp/privatekeys_meta.asc",
) # Show possible wallet keys
# Should be an input???
default_wallet_passphrase = ctrl.config.get("WALLET_PASSPHRASE", "merman")
default_chain_spec = ctrl.config.get("CHAIN_SPEC", "evm:byzantium:8996:bloxberg")
default_rpc_provider = ctrl.config.get(
"RPC_PROVIDER", "https://rpc.grassecon.net"
) )
default_wallet_passphrase = config.get("WALLET_PASSPHRASE", "merman")
default_chain_spec = config.get("CHAIN_SPEC", "evm:byzantium:8996:bloxberg")
default_rpc_provider = config.get("RPC_PROVIDER", "http://localhost:63545")
# Options
contract_registry = ( contract_registry = (
input(f"Enter Contract Registry ({default_contract_registry}): ") input(f"Enter Contract Registry ({default_contract_registry}): ")
or default_contract_registry or default_contract_registry
) )
rpc_provider = ( rpc_provider = (
input(f"Enter RPC Provider ({default_rpc_provider}): ") input(f"Enter RPC Provider ({default_rpc_provider}): ") or default_rpc_provider
or default_rpc_provider
) )
chain_spec = ChainSpec.from_chain_str( chain_spec = ChainSpec.from_chain_str(
(input(f"Enter ChainSpec ({default_chain_spec}): ") or default_chain_spec) (input(f"Enter ChainSpec ({default_chain_spec}): ") or default_chain_spec)
@ -303,35 +347,59 @@ def execute(config, eargs):
input(f"Enter Metadata Endpoint ({default_metadata_endpoint}): ") input(f"Enter Metadata Endpoint ({default_metadata_endpoint}): ")
or default_metadata_endpoint or default_metadata_endpoint
) )
auth_passphrase = ctrl.config.get(
token = init_token(directory) "AUTH_PASSPHRASE"
proof = init_proof(directory)
meta = init_meta(directory)
attachment = init_attachment(directory)
network = init_network(
directory,
registry_address=contract_registry,
key_account_address=key_account,
chain_spec=chain_spec,
rpc_provider=rpc_provider,
targets=[target],
) )
print(f"[cic.header]\nversion = {proof.version()}\n") auth_keyfile_path = ctrl.config.get(
print(f"[cic.token]\n{token}") "AUTH_KEYFILE_PATH"
print(f"[cic.proof]\n{proof}") )
print(f"[cic.meta]\n{meta}") auth_db_path = ctrl.config.get("AUTH_DB_PATH")
print(f"[cic.attachment]\n{attachment}") return Options(
print(f"[cic.network]\n{network}") auth_db_path,
if not eargs.skip_deploy: auth_keyfile_path,
auth_passphrase,
contract_registry,
key_account,
chain_spec,
rpc_provider,
metadata_endpoint,
default_wallet_keyfile,
default_wallet_passphrase,
)
def print_contract(contract: Contract):
    """Print a TOML-style summary of every section of the contract."""
    print(f"[cic.header]\nversion = {contract.proof.version()}\n")
    # Each component renders itself via its own __str__.
    for section in ("token", "proof", "meta", "attachment", "network"):
        print(f"[cic.{section}]\n{getattr(contract, section)}")
def execute(ctrl: CmdCtrl):
directory = ctrl.config.get("_TOKEN_PATH")
target = ctrl.config.get("_TOKEN_TARGET")
skip_gen = ctrl.config.get("_TOKEN_SKIP_GEN")
skip_deploy = ctrl.config.get("_TOKEN_SKIP_DEPLOY")
options = get_options(ctrl)
if not skip_gen:
contract = generate(directory, target, options)
else:
contract = load_contract(directory)
print_contract(contract)
if not skip_deploy:
ready_to_deploy = input("Ready to deploy? (y/n): ") ready_to_deploy = input("Ready to deploy? (y/n): ")
if ready_to_deploy == "y": if ready_to_deploy == "y":
deploy( deploy(
config, ctrl=ctrl,
contract_directory=directory, contract_directory=directory,
gpg_passphrase=default_wallet_passphrase, options=options,
key_file_path=default_wallet_keyfile, keystore_directory="/home/will/grassroots/cic-internal-integration/apps/contract-migration/keystore", # Meta Signer meta.ge.net but not auth.ge.net(usumbufu determins if you can even interact with the server) and this ensures data integrity
metadata_endpoint=metadata_endpoint,
keystore_directory="/home/will/grassroots/cic-internal-integration/apps/contract-migration/keystore",
target=target, target=target,
) )
print("Deployed") print("Deployed")
@ -339,13 +407,6 @@ def execute(config, eargs):
print("Not deploying") print("Not deploying")
#
#
# rpc="http://localhost:63545"
# python -m cic.runnable.cic_cmd init --target eth --name "$token_name" --symbol $token_symbol --precision 6 $token_symbol_lowercase
# python -m cic.runnable.cic_cmd ext -p $rpc -i $chain_spec --registry $contract_registry -d $token_symbol_lowercase eth -vv
# python -m cic.runnable.cic_cmd export -p $rpc --metadata-endpoint http://localhost:63380 -vv -y /home/will/grassroots/cic-internal-integration/apps/contract-migration/keystore -o $token_symbol_lowercase/out -d $token_symbol_lowercase eth
if __name__ == "__main__": if __name__ == "__main__":
execute() # execute()
print("Not Implemented")

42
cic/crypt/aes.py Normal file
View File

@ -0,0 +1,42 @@
# standard imports
import os
import logging
import hashlib
from Crypto.Cipher import AES
from Crypto.Util import Counter
from .base import Encrypter
logg = logging.getLogger(__name__)
class AESCTREncrypt(Encrypter):
    """AES-CTR encrypter whose counter IV is derived from the record key.

    The IV is the first 128 bits of sha256(key || secret), so the same
    (key, secret) pair always yields the same keystream — encrypt and
    decrypt are symmetric.
    """

    # CTR counter width in bits, and the number of IV bytes taken from the hash.
    aes_block_size = 1 << 7
    counter_bytes = int(128 / 8)

    def __init__(self, db_dir, secret):
        # NOTE(review): db_dir is accepted but unused here — confirm whether
        # callers rely on it before removing.
        self.secret = secret

    def key_to_iv(self, k):
        """Derive the counter initial value from key *k* and the secret."""
        digest = hashlib.sha256(k.encode('utf-8') + self.secret).digest()
        return int.from_bytes(digest[:self.counter_bytes], 'big')

    def _cipher(self, k):
        # Fresh cipher object per call; CTR state is not reusable.
        ctr = Counter.new(self.aes_block_size, initial_value=self.key_to_iv(k))
        return AES.new(self.secret, AES.MODE_CTR, counter=ctr)

    def encrypt(self, k, v):
        """Encrypt bytes *v* under the keystream derived from key *k*."""
        return self._cipher(k).encrypt(v)

    def decrypt(self, k, v):
        """Decrypt bytes *v* under the keystream derived from key *k*."""
        return self._cipher(k).decrypt(v)

8
cic/crypt/base.py Normal file
View File

@ -0,0 +1,8 @@
class Encrypter:
    """Abstract interface for keyed symmetric encryption backends.

    Concrete implementations (e.g. AESCTREncrypt) take both a record key
    ``k`` — used to derive the IV/keystream — and the payload ``v``; the
    abstract signatures here are aligned to that two-argument contract so
    subclass overrides and the base class agree.
    """

    def encrypt(self, k, v):
        """Encrypt payload *v* under record key *k*; returns ciphertext bytes.

        :raises NotImplementedError: always, on the base class.
        """
        raise NotImplementedError()

    def decrypt(self, k, v):
        """Decrypt payload *v* under record key *k*; returns plaintext bytes.

        :raises NotImplementedError: always, on the base class.
        """
        raise NotImplementedError()

View File

@ -5,8 +5,19 @@ proof_writer = cic.output.KVWriter
ext_writer = cic.output.KVWriter ext_writer = cic.output.KVWriter
[cic] [cic]
contract_registry_address = registry_address = 0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299
key_account_address =
[meta] [meta]
endpoint=https://meta.grassecon.net url = https://auth.grassecon.net
http_origin =
[auth]
type = gnupg
db_path = /home/will/.local/share/cic/clicada
keyfile_path = /home/will/.config/cic/staff-client/user.asc
keyring_path = /home/will/.config/cic/staff-client/.gnupg
key = CCE2E1D2D0E36ADE0405E2D0995BB21816313BD5
passphrase =

6
cic/errors.py Normal file
View File

@ -0,0 +1,6 @@
class AuthError(Exception):
    """Raised when authentication against a CIC service fails."""
class MetadataNotFoundError(Exception):
    """Raised when a requested metadata record cannot be found."""

View File

@ -198,7 +198,7 @@ class CICEth(Extension):
writer.write('token_address', self.token_address.encode('utf-8')) writer.write('token_address', self.token_address.encode('utf-8'))
self.add_outputs('token', r) self.add_outputs('token', r)
if self.token_details['supply'] > 0: if int(self.token_details['supply']) > 0:
c = GiftableToken(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=self.fee_oracle) c = GiftableToken(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=self.fee_oracle)
o = c.mint_to(self.token_address, self.resources['token']['key_account'], self.resources['token']['key_account'], self.token_details['supply']) o = c.mint_to(self.token_address, self.resources['token']['key_account'], self.resources['token']['key_account'], self.token_details['supply'])
r = None r = None

View File

@ -1,11 +1,13 @@
# standard imports # standard imports
import logging import logging
from typing import TYPE_CHECKING
# external imports # external imports
from hexathon import valid as valid_hex from hexathon import valid as valid_hex
# local imports # local imports
from cic.output import StdoutWriter from cic.output import StdoutWriter
from cic.token import Token
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
@ -26,7 +28,16 @@ class Extension:
:param writer: Writer interface receiving the output of the processor :param writer: Writer interface receiving the output of the processor
:type writer: cic.output.OutputWriter :type writer: cic.output.OutputWriter
""" """
def __init__(self, chain_spec, resources, proof, signer=None, rpc=None, outputs_writer=StdoutWriter()):
def __init__(
self,
chain_spec,
resources,
proof,
signer=None,
rpc=None,
outputs_writer=StdoutWriter(),
):
self.resources = resources self.resources = resources
self.proof = proof self.proof = proof
self.chain_spec = chain_spec self.chain_spec = chain_spec
@ -38,9 +49,8 @@ class Extension:
self.outputs = [] self.outputs = []
self.outputs_writer = outputs_writer self.outputs_writer = outputs_writer
# TODO: apply / prepare token can be factored out # TODO: apply / prepare token can be factored out
def apply_token(self, token): def apply_token(self, token: Token):
"""Initialize extension with token data from settings. """Initialize extension with token data from settings.
:param token: Token object :param token: Token object
@ -48,10 +58,27 @@ class Extension:
:rtype: dict :rtype: dict
:returns: Token data state of extension after load :returns: Token data state of extension after load
""" """
return self.prepare_token(token.name, token.symbol, token.precision, token.code, token.supply) return self.prepare_token(
token.name,
token.symbol,
token.precision,
token.code,
token.supply,
token.extra_args,
token.extra_args_types,
)
def prepare_token(
def prepare_token(self, name, symbol, precision, code, supply, extra=[], extra_types=[], positions=None): self,
name,
symbol,
precision,
code,
supply,
extra=[],
extra_types=[],
positions=None,
):
"""Initialize extension token data. """Initialize extension token data.
:param name: Token name :param name: Token name
@ -74,24 +101,22 @@ class Extension:
:returns: Token data state of extension after load :returns: Token data state of extension after load
""" """
self.token_details = { self.token_details = {
'name': name, "name": name,
'symbol': symbol, "symbol": symbol,
'precision': precision, "precision": precision,
'code': code, "code": code,
'supply': supply, "supply": supply,
'extra': extra, "extra": extra,
'extra_types': extra_types, "extra_types": extra_types,
'positions': positions, "positions": positions,
} }
logg.debug(f"token details: {self.token_details}")
return self.token_details return self.token_details
def prepare_extension(self): def prepare_extension(self):
"""Prepare extension for publishing (noop) """Prepare extension for publishing (noop)"""
"""
pass pass
def parse_code_as_file(self, v): def parse_code_as_file(self, v):
"""Helper method to load application bytecode from file into extensions token data state. """Helper method to load application bytecode from file into extensions token data state.
@ -101,18 +126,17 @@ class Extension:
:type v: str :type v: str
""" """
try: try:
f = open(v, 'r') f = open(v, "r")
r = f.read() r = f.read()
f.close() f.close()
self.parse_code_as_hex(r) self.parse_code_as_hex(r)
except FileNotFoundError as e: except FileNotFoundError as e:
logg.debug('could not parse code as file: {}'.format(e)) logg.debug("could not parse code as file: {}".format(e))
pass pass
except IsADirectoryError as e: except IsADirectoryError as e:
logg.debug('could not parse code as file: {}'.format(e)) logg.debug("could not parse code as file: {}".format(e))
pass pass
def parse_code_as_hex(self, v): def parse_code_as_hex(self, v):
"""Helper method to load application bytecode from hex data into extension token data state. """Helper method to load application bytecode from hex data into extension token data state.
@ -124,10 +148,9 @@ class Extension:
try: try:
self.token_code = valid_hex(v) self.token_code = valid_hex(v)
except ValueError as e: except ValueError as e:
logg.debug('could not parse code as hex: {}'.format(e)) logg.debug("could not parse code as hex: {}".format(e))
pass pass
def load_code(self, hint=None): def load_code(self, hint=None):
"""Attempt to load token application bytecode using token settings. """Attempt to load token application bytecode using token settings.
@ -136,8 +159,8 @@ class Extension:
:rtype: str (hex) :rtype: str (hex)
:return: Bytecode loaded into extension token data state :return: Bytecode loaded into extension token data state
""" """
code = self.token_details['code'] code = self.token_details["code"]
if hint == 'hex': if hint == "hex":
self.token_code = valid_hex(code) self.token_code = valid_hex(code)
for m in [ for m in [
@ -149,11 +172,10 @@ class Extension:
break break
if self.token_code == None: if self.token_code == None:
raise RuntimeError('could not successfully parse token code') raise RuntimeError("could not successfully parse token code")
return self.token_code return self.token_code
def process(self, writer=None): def process(self, writer=None):
"""Adapter used by Processor to process the extensions implementing the Extension base class. """Adapter used by Processor to process the extensions implementing the Extension base class.
@ -168,25 +190,25 @@ class Extension:
writer = self.outputs_writer writer = self.outputs_writer
tasks = [] tasks = []
self.token_address = self.resources['token']['reference'] self.token_address = self.resources["token"]["reference"]
# TODO: get token details when token address is not none # TODO: get token details when token address is not none
if self.token_address == None: if self.token_address == None:
if self.token_details['code'] == None: if self.token_details["code"] == None:
raise RuntimeError('neither token address nor token code has been set') raise RuntimeError("neither token address nor token code has been set")
self.load_code() self.load_code()
tasks.append('token') tasks.append("token")
for k in self.resources.keys(): for k in self.resources.keys():
if k == 'token': if k == "token":
continue continue
if self.resources[k]['reference'] != None: if self.resources[k]["reference"] != None:
tasks.append(k) tasks.append(k)
self.prepare_extension() self.prepare_extension()
for task in tasks: for task in tasks:
logg.debug('extension adapter process {}'.format(task)) logg.debug("extension adapter process {}".format(task))
r = getattr(self, 'process_' + task)(writer=writer) r = getattr(self, "process_" + task)(writer=writer)
return (self.token_address, self.token_details.get('symbol')) return (self.token_address, self.token_details.get("symbol"))

111
cic/http.py Normal file
View File

@ -0,0 +1,111 @@
# standard imports
import hashlib
import logging
import os
import ssl
import urllib.parse
from http.client import HTTPResponse
from socket import getservbyname
from urllib.request import HTTPSHandler
# external imports
from usumbufu.client.base import BaseTokenStore, ClientSession
from usumbufu.client.bearer import BearerClientSession
from usumbufu.client.hoba import HobaClientSession
logg = logging.getLogger(__name__)
class PGPClientSession(HobaClientSession):
    """HOBA client session that signs auth challenges with a PGP key."""

    # HOBA signature algorithm identifier used by the server.
    alg = "969"

    def __init__(self, auth):
        self.auth = auth
        self.origin = None
        self.fingerprint = auth.fingerprint()

    def sign_auth_challenge(self, plaintext, hoba, encoding):
        """Detach-sign *plaintext* and return the serialized HOBA response."""
        signature = self.auth.sign(
            plaintext,
            encoding,
            passphrase=self.auth.get_passphrase(),
            detach=True,
        )
        hoba.signature = signature
        return str(hoba)

    def __str__(self):
        return "clicada hoba/pgp auth"

    # repr and str are intentionally identical.
    __repr__ = __str__
class HTTPSession:
    """HTTP(S) session with bearer-token and optional HOBA/PGP auth handlers.

    Builds a urllib opener chained through usumbufu client sessions, with a
    per-base-url token store kept under the user's runtime directory.
    """

    # Per-user token cache root; one subdirectory per base URL (sha256 hex).
    token_dir = f"/run/user/{os.getuid()}/clicada/usumbufu/.token"

    def __init__(
        self,
        url,
        auth=None,
        origin=None,
        ca_path="/home/will/grassroots/cic-staff-installer/keys/ge.ca",
    ):
        """
        :param url: Base URL of the service.
        :param auth: Optional auth subhandler (e.g. PGPClientSession); its
            origin is set to this session's origin before registration.
        :param origin: Explicit origin override; derived from *url* if None.
        :param ca_path: CA certificate directory for TLS verification.
            Defaults to the previously hard-coded developer path so existing
            callers are unaffected; pass your own deployment's CA dir.
        """
        self.base_url = url
        url_parts = urllib.parse.urlsplit(self.base_url)
        url_parts_origin_host = url_parts[1].split(":")
        host = url_parts_origin_host[0]
        try:
            host = host + ":" + url_parts_origin_host[1]
        except IndexError:
            # No explicit port in the URL: fall back to the scheme's
            # well-known port (e.g. https -> 443).
            host = host + ":" + str(getservbyname(url_parts[0]))
            logg.info(
                f"changed origin with missing port number from {url_parts[1]} to {host}"
            )
        url_parts_origin = (
            url_parts[0],
            host,
            "",
            "",
            "",
        )
        self.origin = origin
        if self.origin is None:
            self.origin = urllib.parse.urlunsplit(url_parts_origin)
        else:
            logg.debug(f"overriding http origin for {url} with {self.origin}")
        # One token store directory per base URL, keyed by its sha256.
        h = hashlib.sha256()
        h.update(self.base_url.encode("utf-8"))
        z = h.digest()
        token_store_dir = os.path.join(self.token_dir, z.hex())
        os.makedirs(token_store_dir, exist_ok=True)
        self.token_store = BaseTokenStore(path=token_store_dir)
        logg.debug(
            f"token store: \n{self.token_store}\n origin: {self.origin}\n token_store_dir: {token_store_dir}\n"
        )
        # Handler chain: base session -> bearer token -> optional auth (HOBA).
        self.session = ClientSession(self.origin, token_store=self.token_store)
        bearer_handler = BearerClientSession(self.origin, token_store=self.token_store)
        self.session.add_subhandler(bearer_handler)
        if auth is not None:
            auth.origin = self.origin
            self.session.add_subhandler(auth)
        ctx = ssl.create_default_context()
        ctx.load_verify_locations(capath=ca_path)
        https_handler = HTTPSHandler(context=ctx)
        self.session.add_parent(parent=https_handler)
        self.opener = urllib.request.build_opener(self.session)

    def open(self, url, method=None, data: bytes = None, headers=None):
        """Perform a request through the session opener and return the
        response body decoded as UTF-8.

        NOTE(review): the default headers=None will break
        urllib.request.Request, which expects a dict — confirm all callers
        pass an explicit headers mapping.
        """
        logg.debug(f"headers: {headers}")
        logg.debug(f"token store: \n{self.token_store}\n origin: {self.origin}")
        req = urllib.request.Request(url=url, data=data, headers=headers, method=method)
        logg.debug(f"open {url} with opener {self}")
        logg.debug(req.get_full_url())
        logg.debug(f"handlers {self.opener.handlers}")
        response: HTTPResponse = self.opener.open(req)
        status = response.getcode()
        logg.debug(f"{url} returned {status}")
        return response.read().decode("utf-8")

    def __str__(self):
        return str(self.session)

View File

@ -1,16 +1,24 @@
from __future__ import annotations
# standard imports # standard imports
import base64 import base64
import json import json
import logging import logging
import os import os
# types
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from cic.cmd.arg import CmdCtrl
# external imports # external imports
from cic_types import MetadataPointer from cic_types import MetadataPointer
from cic_types.ext.metadata import MetadataRequestsHandler
from cic_types.processor import generate_metadata_pointer from cic_types.processor import generate_metadata_pointer
from hexathon import strip_0x from hexathon import strip_0x
from cic.MetaRequestHandler import MetadataRequestsHandler
from cic.output import OutputWriter from cic.output import OutputWriter
from cic.utils import object_to_str
# local imports # local imports
from .base import Data, data_dir from .base import Data, data_dir
@ -28,103 +36,94 @@ class Meta(Data):
:param writer: Writer interface receiving the output of the processor :param writer: Writer interface receiving the output of the processor
:type writer: cic.output.OutputWriter :type writer: cic.output.OutputWriter
""" """
def __init__(self, path='.', writer=None, name="", location="", country_code="", contact={}):
def __init__(
self, path=".", writer=None, name="", location="", country_code="", contact={}
):
super(Meta, self).__init__() super(Meta, self).__init__()
self.name = None self.name = name
self.contact = contact self.contact = contact
self.country_code = country_code self.country_code = country_code
self.location = location self.location = location
self.path = path self.path = path
self.writer = writer self.writer = writer
self.meta_path = os.path.join(self.path, 'meta.json') self.meta_path = os.path.join(self.path, "meta.json")
def load(self): def load(self):
"""Load metadata from settings. """Load metadata from settings."""
"""
super(Meta, self).load() super(Meta, self).load()
f = open(self.meta_path, 'r') f = open(self.meta_path, "r", encoding="utf-8")
o = json.load(f) o = json.load(f)
f.close() f.close()
self.name = o['name'] self.name = o["name"]
self.contact = o['contact'] self.contact = o["contact"]
self.country_code = o['country_code'] self.country_code = o["country_code"]
self.location = o['location'] self.location = o["location"]
self.inited = True self.inited = True
def start(self): def start(self):
"""Initialize metadata settings from template. """Initialize metadata settings from template."""
"""
super(Meta, self).start() super(Meta, self).start()
meta_template_file_path = os.path.join(data_dir, 'meta_template_v{}.json'.format(self.version())) meta_template_file_path = os.path.join(
data_dir, f"meta_template_v{self.version()}.json"
)
f = open(meta_template_file_path) f = open(meta_template_file_path, encoding="utf-8")
o = json.load(f) o = json.load(f)
f.close() f.close()
o['name'] = self.name o["name"] = self.name
o['contact'] = self.contact o["contact"] = self.contact
o['country_code'] = self.country_code o["country_code"] = self.country_code
o['location'] = self.location o["location"] = self.location
f = open(self.meta_path, 'w') f = open(self.meta_path, "w", encoding="utf-8")
json.dump(o, f, sort_keys=True, indent="\t") json.dump(o, f, sort_keys=True, indent="\t")
f.close() f.close()
def reference(self, token_address): def reference(self, token_address):
"""Calculate the mutable reference for the token metadata. """Calculate the mutable reference for the token metadata."""
"""
token_address_bytes = bytes.fromhex(strip_0x(token_address)) token_address_bytes = bytes.fromhex(strip_0x(token_address))
return generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_META) return generate_metadata_pointer(
token_address_bytes, MetadataPointer.TOKEN_META
)
def asdict(self): def asdict(self):
"""Output proof state to dict. """Output proof state to dict."""
"""
return { return {
'name': self.name, "name": self.name,
'contact': self.contact, "country_code": self.country_code,
"location": self.location,
"contact": self.contact,
} }
def process(self, token_address=None, token_symbol=None, writer=None): def process(self, token_address=None, token_symbol=None, writer=None):
"""Serialize and publish metadata. """Serialize and publish metadata.
See cic.processor.Processor.process See cic.processor.Processor.process
""" """
if writer == None: if writer is None:
writer = self.writer writer = self.writer
v = json.dumps(self.asdict()) v = json.dumps(self.asdict())
token_address_bytes = bytes.fromhex(strip_0x(token_address)) token_address_bytes = bytes.fromhex(strip_0x(token_address))
k = generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_META) k = generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_META)
writer.write(k, v.encode('utf-8')) writer.write(k, v.encode("utf-8"))
token_symbol_bytes = token_symbol.encode('utf-8') token_symbol_bytes = token_symbol.encode("utf-8")
k = generate_metadata_pointer(token_symbol_bytes, MetadataPointer.TOKEN_META_SYMBOL) k = generate_metadata_pointer(
writer.write(k, v.encode('utf-8')) token_symbol_bytes, MetadataPointer.TOKEN_META_SYMBOL
)
writer.write(k, v.encode("utf-8"))
return (k, v) return (k, v)
def __str__(self): def __str__(self):
s = f"contact.name = {self.name}\n" return object_to_str(self, ["name", "contact", "country_code", "location"])
s = f"contact.country_code = {self.country_code}\n"
s = f"contact.location = {self.location}\n"
for contact_key in self.contact.keys():
contact_value = self.contact[contact_key]
if not contact_value:
s += f"contact.{contact_key} = \n"
continue
s += f"contact.{contact_key} = {contact_value}\n"
return s
class MetadataWriter(OutputWriter): class MetadataWriter(OutputWriter):
@ -138,14 +137,13 @@ class MetadataWriter(OutputWriter):
def write(self, k, v): def write(self, k, v):
rq = MetadataRequestsHandler(MetadataPointer.NONE, bytes.fromhex(k)) rq = MetadataRequestsHandler(MetadataPointer.NONE, bytes.fromhex(k))
try: try:
v = v.decode('utf-8') v = v.decode("utf-8")
v = json.loads(v) v = json.loads(v)
logg.debug('metadatawriter bindecode {} {}'.format(k, v)) logg.debug(f"metadatawriter bindecode {k} {v}")
except UnicodeDecodeError: except UnicodeDecodeError:
v = base64.b64encode(v).decode('utf-8') v = base64.b64encode(v).decode("utf-8")
v = json.loads(json.dumps(v)) v = json.loads(json.dumps(v))
logg.debug('metadatawriter b64encode {} {}'.format(k, v)) logg.debug(f"metadatawriter b64encode {k} {v}")
r = rq.create(v) r = rq.create(v)
logg.info('metadata submitted at {}'.format(k)) logg.info(f"metadata submitted at {k}")
return r return r

33
cic/notify.py Normal file
View File

@ -0,0 +1,33 @@
# standard imports
import os
import sys
import shutil
class NotifyWriter:
    """Single-line terminal status writer.

    Every message is carriage-returned and padded to the terminal width so
    each notice overwrites the previous one in place.
    """

    def __init__(self, writer=sys.stdout):
        (width, _height) = shutil.get_terminal_size()
        self.cols = width
        # Left-justify to the full terminal width, e.g. "\r{:<80}".
        self.fmt = f"\r{{:<{width}}}"
        self.w = writer
        # Leave room for the 4-character "... " / "!!! " prefix.
        self.notify_max = self.cols - 4

    def notify(self, v):
        """Show an informational notice (cyan)."""
        self.write("\x1b[0;36m... " + v[: self.notify_max] + "\x1b[0;39m")

    def ouch(self, v):
        """Show an error notice (bright red)."""
        self.write("\x1b[0;91m!!! " + v[: self.notify_max] + "\x1b[0;39m")

    def write(self, v):
        """Write *v* on the current line, truncated and padded to width."""
        text = str(v)[: self.cols]
        self.w.write(self.fmt.format(text))

View File

@ -4,6 +4,7 @@ import sys
import logging import logging
import urllib.request import urllib.request
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
@ -12,7 +13,7 @@ class OutputWriter:
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
pass pass
def write(self, k, v): def write(self, k, v, **kwargs):
raise NotImplementedError() raise NotImplementedError()
@ -51,7 +52,7 @@ class HTTPWriter(OutputWriter):
path = self.path path = self.path
if k != None: if k != None:
path = os.path.join(path, k) path = os.path.join(path, k)
logg.debug('http writer post {}'.format(path)) logg.debug(f'http writer post {path} \n key: {k}, value: {v}')
rq = urllib.request.Request(path, method='POST', data=v) rq = urllib.request.Request(path, method='POST', data=v)
r = urllib.request.urlopen(rq) r = urllib.request.urlopen(rq)
logg.info('http writer submitted at {}'.format(r.read())) logg.info('http writer submitted at {}'.format(r.read()))

View File

@ -18,18 +18,25 @@ class Processor:
:param extensions: Extension contexts to publish to :param extensions: Extension contexts to publish to
:type extensions: list of cic.extension.Extension :type extensions: list of cic.extension.Extension
""" """
def __init__(self, proof=None, attachment=None, metadata=None, outputs_writer=None, extensions=[]):
def __init__(
self,
proof=None,
attachment=None,
metadata=None,
outputs_writer=None,
extensions=[],
):
self.token_address = None self.token_address = None
self.extensions = extensions self.extensions = extensions
self.cores = { self.cores = {
'metadata': metadata, "metadata": metadata,
'attachment': attachment, "attachment": attachment,
'proof': proof, "proof": proof,
} }
self.outputs = [] self.outputs = []
self.__outputs_writer = outputs_writer self.__outputs_writer = outputs_writer
def writer(self): def writer(self):
"""Return the writer instance that the process is using. """Return the writer instance that the process is using.
@ -38,7 +45,6 @@ class Processor:
""" """
return self.__outputs_writer return self.__outputs_writer
def get_outputs(self): def get_outputs(self):
"""Return all written outputs. """Return all written outputs.
@ -53,7 +59,6 @@ class Processor:
outputs += self.outputs outputs += self.outputs
return outputs return outputs
def process(self, writer=None): def process(self, writer=None):
"""Serializes and publishes all token data. """Serializes and publishes all token data.
@ -66,18 +71,23 @@ class Processor:
""" """
tasks = [ tasks = [
'attachment', "attachment",
'proof', "proof",
'metadata', "metadata",
] ]
for ext in self.extensions: for ext in self.extensions:
(token_address, token_symbol) = ext.process() # (token_address, token_symbol) = ext.process()
token_address="1a4b2d1B564456f07d5920FeEcdF86077F7bba1E"
token_symbol="WILLY"
for task in tasks: for task in tasks:
a = self.cores.get(task) a = self.cores.get(task)
if a == None: if a == None:
logg.debug('skipping missing task receiver "{}"'.format(task)) logg.debug('skipping missing task receiver "{}"'.format(task))
continue continue
v = a.process(token_address=token_address, token_symbol=token_symbol, writer=self.__outputs_writer) v = a.process(
token_address=token_address,
token_symbol=token_symbol,
writer=self.__outputs_writer,
)
self.outputs.append(v) self.outputs.append(v)

View File

@ -1,19 +1,19 @@
# standard imports # standard imports
import os
import json import json
import logging import logging
import os
import tempfile import tempfile
import cbor2
from cic_types import MetadataPointer
from cic_types.processor import generate_metadata_pointer
# external imports # external imports
from hexathon import strip_0x from hexathon import strip_0x
from cic_types import MetadataPointer
from cic_types.processor import generate_metadata_pointer from cic.utils import object_to_str
from cic_types.ext.metadata import MetadataRequestsHandler
# local imports # local imports
from .base import * from .base import *
from cic.output import OutputWriter
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
@ -180,4 +180,6 @@ class Proof(Data):
return root_key return root_key
def __str__(self): def __str__(self):
return "description = {}\n".format(self.description) return object_to_str(
self, ["description", "issuer", "namespace", "version()", "proofs"]
)

19
cic/runnable/test_cmd.py Normal file
View File

@ -0,0 +1,19 @@
# standard imports
import sys
import logging
# local imports
from cic.cmd import CmdCtrl
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()


def main():
    """Build the command controller from CLI arguments and execute it.

    Construction happens here rather than at module level so that merely
    importing this module has no side effects (the original parsed
    sys.argv at import time).
    """
    ctrl = CmdCtrl(argv=sys.argv[1:], logger=logg)
    ctrl.execute()


if __name__ == '__main__':
    main()

View File

@ -105,10 +105,10 @@ class Token(Data):
f.close() f.close()
def __str__(self): def __str__(self):
s = """name = {} s = f"name = {self.name}\n"
symbol = {} s += f"symbol = {self.symbol}\n"
precision = {} s += f"precision = {self.precision}\n"
""".format( s += f"supply = {self.supply}\n"
self.name, self.symbol, self.precision for idx, extra in enumerate(self.extra_args):
) s += f"extra_args[{idx}]({self.extra_args_types[idx]}) = {extra}\n"
return s return s

24
cic/utils.py Normal file
View File

@ -0,0 +1,24 @@
def object_to_str(obj, keys):
    """Render selected attributes of an object as "key = value" lines.

    :param obj: object whose attributes are rendered
    :param keys: attribute names to include; a name ending in "()" is
        called as a zero-argument method and rendered without the "()"
    :return: string with one line per scalar value, list item or dict
        entry, each terminated by a newline
    """
    s = ""
    for key in keys:
        # resolve "name()" entries via getattr + call; the original used
        # eval("obj." + key), which executes arbitrary attribute strings
        if key.endswith("()"):
            key = key[:-2]
            value = getattr(obj, key)()
        else:
            value = getattr(obj, key)
        if isinstance(value, str):
            s += f"{key} = {value}\n"
        elif isinstance(value, list):
            for idx, item in enumerate(value):
                # falsy items render with an empty value
                s += f"{key}[{idx}] = {item}\n" if item else f"{key}[{idx}] = \n"
        elif isinstance(value, dict):
            for sub_key, sub_value in value.items():
                s += (
                    f"{key}.{sub_key} = {sub_value}\n"
                    if sub_value
                    else f"{key}.{sub_key} = \n"
                )
        else:
            s += f"{key} = {str(value)}\n"
    return s

View File

@ -1,4 +1,4 @@
chainlib-eth~=0.0.13 chainlib-eth~=0.0.21
funga-eth~=0.5.1 funga-eth~=0.5.1
eth-token-index~=0.2.4 eth-token-index~=0.2.4
eth-address-index~=0.2.4 eth-address-index~=0.2.4

View File

@ -1,6 +1,6 @@
funga-eth~=0.5.1 funga-eth~=0.5.1
cic-types~=0.2.1a5 cic-types~=0.2.1a8
confini~=0.5.1 confini~=0.5.3
chainlib~=0.0.13 chainlib~=0.0.17
cbor2==5.4.1 cbor2==5.4.1
click==8.0.3 usumbufu==0.3.6

View File

@ -1,31 +1,55 @@
# standard imports # standard imports
import unittest
import logging import logging
import os import os
import unittest
# external imports
from hexathon import strip_0x
# local imports # local imports
from cic.meta import Meta from cic.meta import Meta
# external imports
from hexathon import strip_0x
# test imports # test imports
from tests.base_cic import ( from tests.base_cic import TestCICBase, test_data_dir
TestCICBase,
test_data_dir,
)
logging.basicConfig(level=logging.DEBUG) logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger() logg = logging.getLogger()
class TestCICMeta(TestCICBase): class TestCICMeta(TestCICBase):
def test_meta(self): def test_meta(self):
fp = os.path.join(test_data_dir, 'proof') fp = os.path.join(test_data_dir, "proof")
m = Meta(fp) m = Meta(fp)
m.load() m.load()
self.assertEquals(
str(m),
"""name = Test
contact.phone = 0700-123456
country_code = KE
location = Kilifi
""",
)
def test_meta_with_initial_values(self):
fp = os.path.join(test_data_dir, "proof")
m = Meta(
fp,
name="TestName",
location="TestLocation",
country_code="TestCC",
contact={
"phone": "0723578455158",
},
)
self.assertEquals(
str(m),
"""name = TestName
contact.phone = 0723578455158
country_code = TestCC
location = TestLocation
""",
)
if __name__ == '__main__': if __name__ == "__main__":
unittest.main() unittest.main()

View File

@ -1,26 +1,39 @@
# standard imports # standard imports
import logging
import os import os
import unittest import unittest
import logging
# local imports # local imports
from cic import Proof from cic import Proof
from cic.attachment import Attachment from cic.attachment import Attachment
# test imports # test imports
from tests.base_cic import ( from tests.base_cic import TestCICBase, root_merged_hash, test_data_dir
test_data_dir,
TestCICBase,
root_merged_hash,
)
logging.basicConfig(level=logging.DEBUG) logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger() logg = logging.getLogger()
class TestProof(TestCICBase): class TestProof(TestCICBase):
def test_proof(self):
proof_path = os.path.join(test_data_dir, "proof")
attach = Attachment(proof_path, writer=self.outputs_writer)
attach.load()
c = Proof(path=proof_path, attachments=attach)
c.load()
self.assertEquals(
str(c),
"""description = foo bar baz
issuer = the man
namespace = ge
version = 0
proofs[0] = 2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae
proofs[1] = fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9
""",
)
def test_proof_serialize_merge(self): def test_proof_serialize_merge(self):
proof_path = os.path.join(test_data_dir, 'proof') proof_path = os.path.join(test_data_dir, "proof")
attach = Attachment(proof_path, writer=self.outputs_writer) attach = Attachment(proof_path, writer=self.outputs_writer)
attach.load() attach.load()
@ -31,5 +44,5 @@ class TestProof(TestCICBase):
self.assertEqual(v, root_merged_hash) self.assertEqual(v, root_merged_hash)
if __name__ == '__main__': if __name__ == "__main__":
unittest.main() unittest.main()

View File

@ -1,7 +1,8 @@
{ {
"name": "", "name": "Test",
"location": "", "location": "Kilifi",
"country_code": "", "country_code": "KE",
"contact": { "contact": {
"phone": "0700-123456"
} }
} }