refactor: switch to poetry, add interactive deployment

This commit is contained in:
2022-03-01 10:01:56 +03:00
parent 45a6e5e79f
commit a2dfdbedb5
56 changed files with 4921 additions and 972 deletions

View File

View File

@@ -0,0 +1,82 @@
# standard imports
import logging
import os
# local imports
from cic.contract.base import Data, data_dir
logg = logging.getLogger(__name__)
class Attachment(Data):
    """Processes, serializes and publishes all attachments found in the "attachments" subdirectory of the settings directory.

    :param path: Path to settings directory
    :type path: str
    :param writer: Writer interface receiving the output of the processor
    :type writer: cic.writers.OutputWriter
    """

    def __init__(self, path=".", writer=None, interactive=False):
        super(Attachment, self).__init__()
        # maps sha256 hex digest -> absolute path of the attachment file
        self.contents = {}
        self.path = path
        self.writer = writer
        self.attachment_path = os.path.join(self.path, "attachments")
        if interactive:
            self.start()
            input(
                f"Please add attachment files to '{os.path.abspath(os.path.join(self.path,'attachments'))}' and then press ENTER to continue"
            )
        self.load()

    def load(self):
        """Loads attachment data from settings."""
        for s in os.listdir(self.attachment_path):
            fp = os.path.realpath(os.path.join(self.attachment_path, s))
            with open(fp, "rb") as f:
                r = f.read()
            z = self.hash(r).hex()
            self.contents[z] = fp
            logg.debug(f"loaded attachment file {fp} digest {z}")

    def start(self):
        """Initialize attachment settings from template."""
        super(Attachment, self).start()
        # exist_ok avoids a FileExistsError when start() is invoked over an
        # already-initialized settings directory
        os.makedirs(self.attachment_path, exist_ok=True)

    def get(self, k):
        """Get a single attachment by the sha256 hash of the content.

        :param k: Content hash
        :type k: str (hex)
        """
        return self.contents[k]

    def asdict(self):
        """Output attachment state to dict"""
        return self.contents

    def process(self, token_address=None, token_symbol=None, writer=None):
        """Serialize and publish attachments.

        See cic.processor.Processor.process
        """
        if writer is None:
            writer = self.writer
        for key, value in self.contents.items():
            # value is already an absolute path (see load()), which makes the
            # join a no-op; it also keeps any relative entries working
            fp = os.path.join(self.attachment_path, value)
            with open(fp, "rb") as f:
                data = f.read()
            logg.debug(f"writing attachment {key}")
            writer.write(key, data)

    def __str__(self):
        return "".join(f"{key} = {value}\n" for key, value in self.contents.items())

View File

@@ -0,0 +1,163 @@
from __future__ import annotations
# standard imports
import os
import json
import logging
import base64
from typing import TYPE_CHECKING
# external imports
from cic_types import MetadataPointer
from cic_types.processor import generate_metadata_pointer
from hexathon import strip_0x
# local imports
from cic.contract.base import Data, data_dir
from cic.writers import OutputWriter
from cic_types.ext.metadata import MetadataRequestsHandler
from cic.utils import object_to_str
logg = logging.getLogger(__name__)
class Meta(Data):
    """Serialize and publish metadata for token.

    The token metadata is any mutable data that is not part of the initial token proof, but published simultaneously as the token nonetheless.

    :param path: Path to settings directory
    :type path: str
    :param writer: Writer interface receiving the output of the processor
    :type writer: cic.writers.OutputWriter
    """

    def __init__(
        self, path=".", writer=None, name="", location="", country_code="KE", contact=None, interactive=False
    ):
        super(Meta, self).__init__()
        self.name = name
        # None sentinel instead of a mutable {} default, which would be shared
        # across every Meta instance
        self.contact = {} if contact is None else contact
        self.country_code = country_code
        self.location = location
        self.path = path
        self.writer = writer
        self.meta_path = os.path.join(self.path, "meta.json")
        if interactive:
            self.name = input(f"Enter Metadata Name ({self.name}): ") or self.name
            self.country_code = input(f"Enter Metadata Country Code ({self.country_code}): ") or self.country_code
            self.location = input(f"Enter Metadata Location ({self.location}): ") or self.location
            adding_contact_info = True
            contact = {}
            while adding_contact_info:
                value = input("Enter Metadata contact info (e.g 'phone: +254723522718'): ") or None
                if value:
                    data = value.split(":")
                    if len(data) != 2:
                        print("Invalid contact info, you must enter in the format 'key: value'")
                        continue
                    contact[data[0].strip()] = data[1].strip()
                else:
                    adding_contact_info = False
            self.contact = contact

    def load(self):
        """Load metadata from settings."""
        super(Meta, self).load()
        with open(self.meta_path, "r", encoding="utf-8") as f:
            o = json.load(f)
        self.name = o["name"]
        self.contact = o["contact"]
        self.country_code = o["country_code"]
        self.location = o["location"]
        self.inited = True

    def start(self):
        """Initialize metadata settings from template."""
        super(Meta, self).start()
        meta_template_file_path = os.path.join(
            data_dir, f"meta_template_v{self.version()}.json"
        )
        with open(meta_template_file_path, encoding="utf-8") as f:
            o = json.load(f)
        o["name"] = self.name
        o["contact"] = self.contact
        o["country_code"] = self.country_code
        o["location"] = self.location
        with open(self.meta_path, "w", encoding="utf-8") as f:
            json.dump(o, f, sort_keys=True, indent="\t")

    def reference(self, token_address):
        """Calculate the mutable reference for the token metadata.

        :param token_address: Token address (hex)
        :type token_address: str
        """
        token_address_bytes = bytes.fromhex(strip_0x(token_address))
        return generate_metadata_pointer(
            token_address_bytes, MetadataPointer.TOKEN_META
        )

    def asdict(self):
        """Output proof state to dict."""
        return {
            "name": self.name,
            "country_code": self.country_code,
            "location": self.location,
            "contact": self.contact,
        }

    def process(self, token_address=None, token_symbol=None, writer=None):
        """Serialize and publish metadata.

        See cic.processor.Processor.process
        """
        if writer is None:
            writer = self.writer
        v = json.dumps(self.asdict(), separators=(",", ":"))
        # publish under both the address-derived and the symbol-derived pointer
        token_address_bytes = bytes.fromhex(strip_0x(token_address))
        k = generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_META)
        writer.write(k, v.encode("utf-8"))
        token_symbol_bytes = token_symbol.encode("utf-8")
        k = generate_metadata_pointer(
            token_symbol_bytes, MetadataPointer.TOKEN_META_SYMBOL
        )
        writer.write(k, v.encode("utf-8"))
        return (k, v)

    def __str__(self):
        return object_to_str(self, ["name", "contact", "country_code", "location"])
class MetadataWriter(OutputWriter):
    """Custom writer for publishing data under immutable content-addressed pointers in the cic-meta storage backend.

    Data that is not utf-8 will be converted to base64 before publishing.

    Implements cic.writers.OutputWriter
    """

    def write(self, k, v):
        """Publish value v under content pointer k via the metadata backend.

        :param k: Content pointer (hex)
        :type k: str
        :param v: Payload to publish
        :type v: bytes
        """
        handler = MetadataRequestsHandler(MetadataPointer.NONE, bytes.fromhex(k))
        try:
            decoded = v.decode("utf-8")
        except UnicodeDecodeError:
            # binary payload: publish as a base64-encoded JSON string
            v = base64.b64encode(v).decode("utf-8")
            v = json.loads(json.dumps(v, separators=(",", ":")))
            logg.debug(f"metadatawriter b64encode {k} {v}")
        else:
            # utf-8 payload is expected to be JSON
            v = json.loads(decoded)
            logg.debug(f"metadatawriter bindecode {k} {v}")
        result = handler.create(v)
        logg.info(f"metadata submitted at {k}")
        return result

View File

@@ -0,0 +1,145 @@
# standard imports
import os
import json
import logging
# external imports
from chainlib.chain import ChainSpec
# local imports
from cic.contract.components.base import Data, data_dir
logg = logging.getLogger(__name__)
class Network(Data):
    """Contains network settings for token deployments across extensions.

    Extension targets are defined by the keys immediately following the "resources" key in the network settings file.

    :param path: Path to settings directory
    :type path: str
    :param targets: Extension targets to execute
    :type targets: list of str
    """

    def __init__(self, path='.', targets=None):
        super(Network, self).__init__()
        self.resources = None
        self.path = path
        # None sentinel instead of a mutable [] default, which would be shared
        # across every Network instance
        self.targets = [] if targets is None else targets
        self.network_path = os.path.join(self.path, 'network.json')

    def load(self):
        """Load network settings from file.
        """
        super(Network, self).load()
        with open(self.network_path, 'r') as f:
            o = json.load(f)
        self.resources = o['resources']
        self.inited = True

    def start(self):
        """Initialize network settings with targets chosen at object instantiation.

        Will save to network settings file.
        """
        # NOTE(review): this calls the base class load(), not start(), unlike
        # the sibling components (Attachment, Meta, Proof) — confirm intended.
        super(Network, self).load()
        import copy  # local import: only needed when (re)initializing settings
        network_template_file_path = os.path.join(data_dir, f'network_template_v{self.version()}.json')
        with open(network_template_file_path) as f:
            o_part = json.load(f)
        self.resources = {}
        for v in self.targets:
            # each target needs its own copy of the template; assigning the
            # same dict to every target would make resource_set() on one
            # target leak into all the others
            self.resources[v] = copy.deepcopy(o_part)
        self.save()

    def save(self):
        """Save network settings to file.
        """
        with open(self.network_path, 'w') as f:
            json.dump({
                'resources': self.resources,
                }, f, sort_keys=True, indent="\t")

    def resource(self, k):
        """Get settings definitions for a given extension.

        :param k: Extension key
        :type k: str
        :rtype: dict
        :return: Extension settings
        """
        v = self.resources.get(k)
        if v is None:
            raise AttributeError(f'no defined reference for {k}')
        return v

    def resource_set(self, resource_key, content_key, reference, key_account=None):
        """Set the values a content part of an extension setting.

        The content parts define network application resources. Each entry is keyed by the name of the application. Each value consists of a key_account used to write/deploy to the contract, and the reference (address) of the application resource. If no application resource yet exists on the network for the part, the reference value will be None.

        :param resource_key: Extension key
        :type resource_key: str
        :param content_key: Resource name (e.g. smart contract name)
        :type content_key: str
        :param reference: Reference to resource on network (e.g. smart contract address)
        :type reference: str
        :param key_account: Address of account to sign transaction for the resource with
        :type key_account: str
        """
        self.resources[resource_key]['contents'][content_key]['reference'] = reference
        self.resources[resource_key]['contents'][content_key]['key_account'] = key_account

    def chain_spec(self, k):
        """Retrieve chain spec for the given extension

        :param k: Extension key
        :type k: str
        :rtype: chainlib.chain.ChainSpec
        :return: Chain spec object
        """
        v = self.resource(k)
        return ChainSpec.from_dict(v['chain_spec'])

    def set(self, resource_key, chain_spec):
        """Set chain spec for resource.

        :param resource_key: Extension key
        :type resource_key: str
        :param chain_spec: Chain spec to set
        :type chain_spec: chainlib.chain.ChainSpec
        """
        chain_spec_dict = chain_spec.asdict()
        for k in chain_spec_dict.keys():
            logg.debug('resources %s', self.resources)
            self.resources[resource_key]['chain_spec'][k] = chain_spec_dict[k]

    def __str__(self):
        s = ''
        for resource in self.resources.keys():
            for content_key in self.resources[resource]['contents'].keys():
                content_value = self.resources[resource]['contents'][content_key]
                if content_value is None:
                    content_value = ''
                s += f'{resource}.{content_key} = {content_value}\n'
        return s

View File

@@ -0,0 +1,192 @@
# standard imports
import json
import logging
import os
import tempfile
# external imports
from hexathon import strip_0x
from cic_types import MetadataPointer
from cic_types.processor import generate_metadata_pointer
# local imports
from cic.contract.base import Data, data_dir
from cic.utils import object_to_str
logg = logging.getLogger(__name__)
class Proof(Data):
    """Proof handles the immutable token proof data mapped to the initial token deployment.

    It processes inputs from the proof.json file in the session directory.

    Optionally, attachment objects can be added to the proof. If added, the resulting proof digest will consist of the attachment digests added to the root digest. These are then deterministically ordered, regardless of which order attachments were given to the constructor.

    :param path: Path to settings directory
    :type path: str
    :param attachments: List of attachment objects to include in the proof
    :type attachments: cic.attachment.Attachment
    :param writer: Writer interface receiving the output of the processor
    :type writer: cic.writers.OutputWriter
    """

    def __init__(
        self,
        path=".",
        description="",
        namespace="ge",
        issuer="",
        attachments=None,
        writer=None,
        interactive=False,
    ):
        super(Proof, self).__init__()
        self.proofs = []
        self.namespace = namespace
        self.description = description
        self.issuer = issuer
        self.path = path
        self.writer = writer
        self.extra_attachments = attachments
        self.attachments = {}
        self.proof_path = os.path.join(self.path, "proof.json")
        # scratch file holding the latest serialized proof body
        self.temp_proof_path = tempfile.mkstemp()[1]
        if interactive:
            self.description = (
                input(f"Enter Proof Description ({self.description}): ") or self.description
            )
            self.namespace = (
                input(f"Enter Proof Namespace ({self.namespace}): ") or self.namespace
            )
            self.issuer = input(f"Enter Proof Issuer ({self.issuer}): ") or self.issuer

    def load(self):
        """Load proof data from settings."""
        super(Proof, self).load()
        with open(self.proof_path, "r") as f:
            o = json.load(f)
        self.set_version(o["version"])
        self.description = o["description"]
        self.namespace = o["namespace"]
        self.issuer = o["issuer"]
        self.proofs = o["proofs"]
        if self.extra_attachments is not None:
            a = self.extra_attachments.asdict()
            for k in a.keys():
                self.attachments[k] = a[k]
        # recompute proofs from attachments in deterministic order
        hshs = self.__get_ordered_hashes()
        self.proofs = list(map(strip_0x, hshs))
        self.inited = True

    def start(self):
        """Initialize proof settings from template."""
        super(Proof, self).start()
        proof_template_file_path = os.path.join(
            data_dir, f"proof_template_v{self.version()}.json"
        )
        with open(proof_template_file_path, "r", encoding="utf-8") as f:
            o = json.load(f)
        o["issuer"] = self.issuer
        o["description"] = self.description
        o["namespace"] = self.namespace
        with open(self.proof_path, "w", encoding="utf-8") as f:
            json.dump(o, f, sort_keys=True, indent="\t")

    def asdict(self):
        """Output proof state to dict."""
        return {
            "version": self.version(),
            "namespace": self.namespace,
            "description": self.description,
            "issuer": self.issuer,
            "proofs": self.proofs,
        }

    def __get_ordered_hashes(self):
        """Return attachment digests in deterministic (lexicographic) order."""
        return sorted(self.attachments.keys())

    def root(self):
        """Calculate the root digest from the serialized proof object.

        :rtype: tuple of (str, bytes)
        :return: Hex digest and the serialized proof it was calculated from
        """
        v = self.asdict()
        b = json.dumps(v, separators=(",", ":"))
        with open(self.temp_proof_path, "w", encoding="utf-8") as f:
            f.write(b)
        b = b.encode("utf-8")
        k = self.hash(b)
        return (k.hex(), b)

    def process(self, token_address=None, token_symbol=None, writer=None):
        """Serialize and publish proof.

        See cic.processor.Processor.process
        """
        if writer is None:
            writer = self.writer
        (k, v) = self.root()
        writer.write(k, v)
        root_key = k
        # publish the same body under the symbol- and address-derived pointers
        token_symbol_bytes = token_symbol.encode("utf-8")
        k = generate_metadata_pointer(
            token_symbol_bytes, MetadataPointer.TOKEN_PROOF_SYMBOL
        )
        writer.write(k, v)
        token_address_bytes = bytes.fromhex(strip_0x(token_address))
        k = generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_PROOF)
        writer.write(k, v)
        o = self.asdict()
        with open(self.proof_path, "w", encoding="utf-8") as f:
            json.dump(o, f, sort_keys=True, indent="\t")
        return root_key

    def __str__(self):
        return object_to_str(
            self, ["description", "issuer", "namespace", "version()", "proofs"]
        )

View File

@@ -0,0 +1,123 @@
# standard imports
import json
import os
# local imports
from cic.contract.base import Data, data_dir
from cic.contract.helpers import select_contract
class Token(Data):
    """Encapsulates the token data used by the extension to deploy and/or register token and token related applications on chain.

    Token details (name, symbol etc) will be used to initialize the token settings when start is called. If load is called instead, any token detail parameters passed to the constructor will be overwritten by data stored in the settings.

    :param path: Settings directory path
    :type path: str
    :param name: Token name
    :type name: str
    :param symbol: Token symbol
    :type symbol: str
    :param precision: Token value precision (number of decimals)
    :type precision: int
    :param supply: Token supply (in smallest precision units)
    :type supply: int
    :param code: Bytecode for token chain application
    :type code: str (hex)
    """

    def __init__(
        self,
        path=".",
        name="Foo Token",
        symbol="FOO",
        precision=6,
        supply=0,
        code=None,
        extra_args=None,
        extra_args_types=None,
        interactive=False,
    ):
        super(Token, self).__init__()
        self.name = name
        self.symbol = symbol
        self.supply = supply
        self.precision = precision
        self.code = code
        # None sentinels instead of mutable [] defaults, which would be shared
        # across every Token instance
        self.extra_args = [] if extra_args is None else extra_args
        self.extra_args_types = [] if extra_args_types is None else extra_args_types
        self.path = path
        self.token_path = os.path.join(self.path, "token.json")
        if interactive:
            contract = select_contract()
            self.code = contract["bin_path"]
            self.extra_args = contract["extra_args"]
            self.extra_args_types = contract["extra_args_types"]
            self.name = input(f"Enter Token Name ({self.name}): ") or self.name
            self.symbol = input(f"Enter Token Symbol ({self.symbol}): ") or self.symbol
            # cast numeric fields: input() returns str, but precision/supply
            # are ints everywhere else (constructor defaults, load())
            self.precision = int(input(f"Enter Token Precision ({self.precision}): ") or self.precision)
            self.supply = int(input(f"Enter Token Supply ({self.supply}): ") or self.supply)

    def load(self):
        """Load token data from settings."""
        super(Token, self).load()
        with open(self.token_path, "r", encoding="utf-8") as f:
            o = json.load(f)
        self.name = o["name"]
        self.symbol = o["symbol"]
        self.precision = o["precision"]
        self.code = o["code"]
        self.supply = o["supply"]
        extras = []
        extra_types = []
        token_extras: list = o["extra"]
        if token_extras:
            for idx, token_extra in enumerate(token_extras):
                arg = token_extra.get("arg")
                arg_type = token_extra.get("arg_type")
                if arg and arg_type:
                    extras.append(arg)
                    extra_types.append(arg_type)
                elif arg or arg_type:
                    # exactly one of the pair is set: malformed entry
                    raise ValueError(
                        f"Extra contract args must have a 'arg' and 'arg_type', Please check {self.token_path}:extra[{idx}] "
                    )
        self.extra_args = extras
        self.extra_args_types = extra_types
        self.inited = True

    def start(self):
        """Initialize token settings from arguments passed to the constructor and/or template."""
        # NOTE(review): this calls the base class load(), not start(), unlike
        # the sibling components (Attachment, Meta, Proof) — confirm intended.
        super(Token, self).load()
        token_template_file_path = os.path.join(
            data_dir, f"token_template_v{self.version()}.json"
        )
        with open(token_template_file_path, encoding="utf-8") as f:
            o = json.load(f)
        o["name"] = self.name
        o["symbol"] = self.symbol
        o["precision"] = self.precision
        o["code"] = self.code
        o["supply"] = self.supply
        extra = [
            {"arg": extra_arg, "arg_type": self.extra_args_types[idx]}
            for idx, extra_arg in enumerate(self.extra_args)
        ]
        if extra:
            o["extra"] = extra
        with open(self.token_path, "w", encoding="utf-8") as f:
            json.dump(o, f, sort_keys=True, indent="\t")

    def __str__(self):
        s = f"name = {self.name}\n"
        s += f"symbol = {self.symbol}\n"
        s += f"precision = {self.precision}\n"
        s += f"supply = {self.supply}\n"
        for idx, extra in enumerate(self.extra_args):
            s += f"extra_args[{idx}]({self.extra_args_types[idx]}) = {extra}\n"
        return s