refactor: switch to poetry, add interactive deployment

This commit is contained in:
2022-03-01 10:01:56 +03:00
parent 45a6e5e79f
commit a2dfdbedb5
56 changed files with 4921 additions and 972 deletions

View File

@@ -1,2 +1 @@
from .proof import Proof
from .processor import Processor
__version__ = "0.0.2"

View File

@@ -1,39 +1,49 @@
# standard imports
import logging
import importlib
import logging
import os
from typing import Optional
# local imports
from cic import ContractProcessor, Proof
from cic.attachment import Attachment
from cic.meta import Meta, MetadataWriter
from cic.network import Network
from cic.writers import HTTPWriter, KeyedWriterFactory
from cic.token import Token
# external imports
from cic_types.ext.metadata import MetadataRequestsHandler
from cic_types.ext.metadata.signer import Signer as MetadataSigner
# local imports
from cic import (
Proof,
Processor,
)
from cic.output import (
HTTPWriter,
KeyedWriterFactory,
)
from cic.meta import (
Meta,
MetadataWriter,
)
from cic.attachment import Attachment
from cic.network import Network
from cic.token import Token
logg = logging.getLogger(__name__)
def process_args(argparser):
argparser.add_argument('-d', '--directory', type=str, dest='directory', default='.', help='directory')
argparser.add_argument('-o', '--output-directory', type=str, dest='output_directory', help='output directory')
argparser.add_argument('--metadata-endpoint', dest='metadata_endpoint', type=str, help='metadata endpoint to interact with')
argparser.add_argument('-y', '--signer', type=str, dest='y', help='target-specific signer to use for export')
argparser.add_argument('-p', type=str, help='RPC endpoint')
argparser.add_argument('target', type=str, help='target network type')
argparser.add_argument(
"-d", "--directory", type=str, dest="directory", default=".", help="directory"
)
argparser.add_argument(
"-o",
"--output-directory",
type=str,
dest="output_directory",
help="output directory",
)
argparser.add_argument(
"--metadata-endpoint",
dest="metadata_endpoint",
type=str,
help="metadata endpoint to interact with",
)
argparser.add_argument(
"-y",
"--signer",
type=str,
dest="y",
help="target-specific signer to use for export",
)
argparser.add_argument("-p", type=str, help="RPC endpoint")
argparser.add_argument("target", type=str, help="target network type")
def validate_args(args):
@@ -42,23 +52,37 @@ def validate_args(args):
def init_writers_from_config(config):
w = {
'meta': None,
'attachment': None,
'proof': None,
'ext': None,
}
"meta": None,
"attachment": None,
"proof": None,
"ext": None,
}
for v in w.keys():
k = 'CIC_CORE_{}_WRITER'.format(v.upper())
(d, c) = config.get(k).rsplit('.', maxsplit=1)
k = "CIC_CORE_{}_WRITER".format(v.upper())
(d, c) = config.get(k).rsplit(".", maxsplit=1)
m = importlib.import_module(d)
o = getattr(m, c)
w[v] = o
return w
def execute(config, eargs):
modname = 'cic.ext.{}'.format(eargs.target)
ExtraArgs = {
"target": str,
"key_file_path": str,
"gpg_passphrase": str,
"directory": str,
"output_directory": str,
"metadata_endpoint": Optional[str],
"y": str,
}
def execute(config, eargs: ExtraArgs):
# !TODO Remove this
eargs.key_file_path = "/home/will/grassroots/cic-internal-integration/apps/cic-ussd/tests/data/pgp/privatekeys_meta.asc"
eargs.gpg_passphrase = "merman"
modname = f"cic.ext.{eargs.target}"
cmd_mod = importlib.import_module(modname)
writers = init_writers_from_config(config)
@@ -66,18 +90,26 @@ def execute(config, eargs):
output_writer_path_meta = eargs.output_directory
if eargs.metadata_endpoint != None:
MetadataRequestsHandler.base_url = eargs.metadata_endpoint
MetadataSigner.gpg_path = os.path.join('/tmp')
MetadataSigner.key_file_path = '/home/lash/src/client/cic/grassrootseconomics/cic-internal-integration/apps/cic-ussd/tests/data/pgp/privatekeys_meta.asc'
MetadataSigner.gpg_passphrase = 'merman'
writers['proof'] = KeyedWriterFactory(MetadataWriter, HTTPWriter).new
writers['attachment'] = KeyedWriterFactory(None, HTTPWriter).new
writers['meta'] = MetadataWriter
MetadataSigner.gpg_path = os.path.join("/tmp")
MetadataSigner.key_file_path = eargs.key_file_path
MetadataSigner.gpg_passphrase = eargs.gpg_passphrase
writers["proof"] = KeyedWriterFactory(MetadataWriter, HTTPWriter).new
writers["attachment"] = KeyedWriterFactory(None, HTTPWriter).new
writers["meta"] = MetadataWriter
output_writer_path_meta = eargs.metadata_endpoint
ct = Token(path=eargs.directory)
cm = Meta(path=eargs.directory, writer=writers['meta'](path=output_writer_path_meta))
ca = Attachment(path=eargs.directory, writer=writers['attachment'](path=output_writer_path_meta))
cp = Proof(path=eargs.directory, attachments=ca, writer=writers['proof'](path=output_writer_path_meta))
cm = Meta(
path=eargs.directory, writer=writers["meta"](path=output_writer_path_meta)
)
ca = Attachment(
path=eargs.directory, writer=writers["attachment"](path=output_writer_path_meta)
)
cp = Proof(
path=eargs.directory,
attachments=ca,
writer=writers["proof"](path=output_writer_path_meta),
)
cn = Network(path=eargs.directory)
ca.load()
@@ -88,20 +120,29 @@ def execute(config, eargs):
chain_spec = None
try:
chain_spec = config.get('CHAIN_SPEC')
chain_spec = config.get("CHAIN_SPEC")
except KeyError:
chain_spec = cn.chain_spec
config.add(chain_spec, 'CHAIN_SPEC', exists_ok=True)
logg.debug('CHAIN_SPEC config set to {}'.format(str(chain_spec)))
config.add(chain_spec, "CHAIN_SPEC", exists_ok=True)
logg.debug(f"CHAIN_SPEC config set to {str(chain_spec)}")
#signer = cmd_mod.parse_signer(eargs.y)
# signer = cmd_mod.parse_signer(eargs.y)
(rpc, signer) = cmd_mod.parse_adapter(config, eargs.y)
ref = cn.resource(eargs.target)
chain_spec = cn.chain_spec(eargs.target)
logg.debug('found reference {} chain spec {} for target {}'.format(ref['contents'], chain_spec, eargs.target))
c = getattr(cmd_mod, 'new')(chain_spec, ref['contents'], cp, signer_hint=signer, rpc=rpc, outputs_writer=writers['ext'](path=eargs.output_directory))
logg.debug(
f"found reference {ref['contents']} chain spec {chain_spec} for target {eargs.target}"
)
c = getattr(cmd_mod, "new")(
chain_spec,
ref["contents"],
cp,
signer_hint=signer,
rpc=rpc,
outputs_writer=writers["ext"](path=eargs.output_directory),
)
c.apply_token(ct)
p = Processor(proof=cp, attachment=ca, metadata=cm, extensions=[c])
p = ContractProcessor(proof=cp, attachment=ca, metadata=cm, extensions=[c])
p.process()

View File

@@ -25,6 +25,6 @@ def execute(config, eargs):
chain_spec = ChainSpec.from_chain_str(eargs.i)
m = importlib.import_module('cic.ext.{}.start'.format(eargs.target))
m = importlib.import_module(f'cic.ext.{eargs.target}.start')
m.extension_start(cn, registry_address=eargs.registry, chain_spec=chain_spec, rpc_provider=config.get('RPC_PROVIDER'))

390
cic/cmd/wizard.py Normal file
View File

@@ -0,0 +1,390 @@
from __future__ import annotations
# standard import
import importlib
import json
import logging
import os
from typing import TYPE_CHECKING, List
import requests
# external imports
from chainlib.chain import ChainSpec
# local imports
from cic import Proof
from cic.actions.deploy import deploy
from cic.actions.types import Contract, Options
from cic.attachment import Attachment
from cic.meta import Meta
from cic.network import Network
from cic.token import Token
if TYPE_CHECKING:
from chainlib.cli.config import Config
log = logging.getLogger(__name__)
def process_args(argparser):
    """Register the wizard subcommand's CLI arguments on *argparser*.

    Generation and deployment can each be skipped independently via flags;
    the positional ``path`` selects the session directory.
    """
    argparser.add_argument("--skip-gen", action="store_true", default=False, help="Skip Generation")
    argparser.add_argument("--skip-deploy", action="store_true", help="Skip Deployment")
    argparser.add_argument("--target", default="eth", help="Contract Target (eth)")
    argparser.add_argument("path", type=str, help="Path to generate/use contract deployment info")
    argparser.add_argument("-p", type=str, help="RPC Provider (http://localhost:8545)")
    argparser.add_argument("-y", type=str, help="Wallet Keystore")
def validate_args(_args):
    """No-op validation hook required by the cic command-loader interface."""
    pass
# Known contract templates offered by the wizard. Each entry's url is the base
# path to the compiled artifacts; ".bin" / ".json" suffixes are appended when
# the files are downloaded in select_contract().
CONTRACTS = [
    {
        "url": "https://gitlab.com/cicnet/eth-erc20/-/raw/master/python/giftable_erc20_token/data/GiftableToken",
        "name": "Giftable Token",
    },
    {
        "url": "https://gitlab.com/cicnet/erc20-demurrage-token/-/raw/master/python/erc20_demurrage_token/data/DemurrageTokenSingleNocap",
        "name": "Demurrage Token Single No Cap",
    },
]
# Download a file from a URL, reusing a previously downloaded copy on disk.
def download_file(url: str, directory: str, filename=None) -> str:
    """Download ``url`` into ``directory`` and return the local file path.

    If the target file already exists, it is reused and no request is made.

    :param url: URL to fetch.
    :param directory: Directory to store the file in (created if missing).
    :param filename: Optional explicit filename; defaults to the last URL segment.
    :return: Path to the (possibly pre-existing) local file.
    """
    os.makedirs(directory, exist_ok=True)
    filename = filename if filename else url.split("/")[-1]
    path = os.path.join(directory, filename)
    if not os.path.exists(path):
        # The original logged a placeholder-free f-string; log the real target.
        log.debug(f"Downloading {url} to {path}")
        r = requests.get(url, allow_redirects=True)
        # Context manager so the file handle is always closed.
        with open(path, "wb") as f:
            f.write(r.content)
    return path
def get_contract_args(data: list):
    """Return the constructor input spec from a parsed contract ABI.

    :param data: Contract ABI as a list of member dicts.
    :return: The ``inputs`` list of the first constructor entry.
    :raises Exception: If the ABI contains no constructor entry.
    """
    constructors = (entry["inputs"] for entry in data if entry["type"] == "constructor")
    for inputs in constructors:
        return inputs
    raise Exception("No constructor found in contract")
def print_contract_args(json_path: str):
    """Pretty-print the constructor arguments declared in a contract ABI file."""
    with open(json_path, encoding="utf-8") as abi_file:
        abi = json.load(abi_file)
    print("Contract Args:")
    for arg in get_contract_args(abi):
        name = arg.get('name', '<no name>')
        arg_type = arg.get('type', '<no type>')
        print(f"\t{name} - {arg_type}")
def select_contract():
    """Interactively choose a token contract.

    Presents the known CONTRACTS plus a custom path/url option, downloads the
    bytecode (and ABI when available), and prompts for any extra constructor
    arguments found in the ABI.

    :return: dict with ``bin_path``, ``json_path``, ``extra_args`` and
        ``extra_args_types``. ``json_path`` may be None for custom contracts
        without a discoverable ABI file.
    """
    print("Contracts:")
    print("\t C - Custom (path/url to contract)")
    for idx, contract in enumerate(CONTRACTS):
        print(f"\t {idx} - {contract['name']}")

    val = input("Select contract (C,0,1..): ")
    # May legitimately stay None (custom contract without an ABI); initializing
    # it avoids the NameError the original hit on the custom/url branches.
    json_path = None
    if val.isdigit() and int(val) < len(CONTRACTS):
        contract = CONTRACTS[int(val)]
        directory = f"./contracts/{contract['name']}"
        bin_path = os.path.abspath(download_file(contract["url"] + ".bin", directory))
        json_path = download_file(contract["url"] + ".json", directory)
    elif val == "C":
        possible_bin_location = input("Enter path/url to contract: ")
        # possible_bin_location is a local path
        if possible_bin_location[0] == "." or possible_bin_location[0] == "/":
            if os.path.exists(possible_bin_location):
                bin_path = os.path.abspath(possible_bin_location)
            else:
                raise Exception(f"File {possible_bin_location} does not exist")
            # Fixed: the original replaced on `val` (always "C" here), so the
            # sibling ABI file was never found.
            possible_json_path = possible_bin_location.replace(".bin", ".json")
            if os.path.exists(possible_json_path):
                json_path = possible_json_path
        # possible_bin_location is a url
        else:
            # Fixed: `directory` was unbound on this branch in the original.
            bin_path = download_file(possible_bin_location, "./contracts/custom")
    else:
        print("Invalid selection")
        exit(1)

    contract_extra_args = []
    contract_extra_args_types = []
    if json_path is not None and os.path.exists(json_path):
        with open(json_path, encoding="utf-8") as f:
            json_data = json.load(f)
        for contract_arg in get_contract_args(json_data):
            arg_name = contract_arg.get("name")
            arg_type = contract_arg.get("type")
            if arg_name not in ["_decimals", "_name", "_symbol"]:
                arg_val = input(f"Enter value for {arg_name} ({arg_type}): ")
                contract_extra_args.append(arg_val)
                # uint128 constructor args are ABI-encoded as uint256 downstream.
                if arg_type == "uint128":
                    contract_extra_args_types.append("uint256")
                else:
                    contract_extra_args_types.append(arg_type)
    return {
        "bin_path": bin_path,
        "json_path": json_path,
        "extra_args": contract_extra_args,
        "extra_args_types": contract_extra_args_types,
    }
def init_token(directory: str, code=""):
    """Interactively create and persist a Token component in *directory*.

    Prompts for the contract to use plus name/symbol/precision/supply, then
    writes the token settings via Token.start().

    :param directory: Session directory the token settings are written to.
    :param code: Unused — immediately overwritten by the selected contract's
        bin_path below; kept for interface compatibility. TODO confirm and drop.
    """
    contract = select_contract()
    code = contract["bin_path"]
    contract_extra_args = contract["extra_args"]
    contract_extra_args_types = contract["extra_args_types"]

    name = input("Enter Token Name (Foo Token): ") or "Foo Token"
    symbol = input("Enter Token Symbol (FOO): ") or "FOO"
    # NOTE(review): input() yields str while the fallbacks are int — confirm
    # Token accepts both for precision/supply.
    precision = input("Enter Token Precision (6): ") or 6
    supply = input("Enter Token Supply (0): ") or 0

    contract_token = Token(
        directory,
        name=name,
        symbol=symbol,
        precision=precision,
        extra_args=contract_extra_args,
        extra_args_types=contract_extra_args_types,
        supply=supply,
        code=code,
    )
    contract_token.start()
    return contract_token
def init_proof(directory):
    """Interactively create and persist a Proof component in *directory*."""
    answers = {
        "description": input("Enter Proof Description (None): ") or None,
        "namespace": input("Enter Proof Namespace (ge): ") or "ge",
        "issuer": input("Enter Proof Issuer (None): ") or None,
    }
    proof = Proof(directory, answers["description"], answers["namespace"], answers["issuer"])
    proof.start()
    return proof
def init_meta(directory):
    """Interactively create and persist a Meta component in *directory*.

    Contact entries are collected until the user submits an empty line; each
    entry must be a single ``key: value`` pair.
    """
    meta_name = input("Enter Name (None): ") or ""
    meta_country_code = input("Enter Country Code (KE): ") or "KE"
    meta_location = input("Enter Location (None): ") or ""

    contact = {}
    while True:
        entry = input("Enter contact info (e.g 'phone: +254723522718'): ") or None
        if not entry:
            break
        parts = entry.split(":")
        if len(parts) != 2:
            print("Invalid contact info, you must enter in the format 'key: value'")
            continue
        contact[parts[0].strip()] = parts[1].strip()

    contract_meta = Meta(
        directory,
        name=meta_name,
        country_code=meta_country_code,
        location=meta_location,
        contact=contact,
    )
    contract_meta.start()
    return contract_meta
def init_attachment(directory):
    """Interactively create an Attachment component in *directory*.

    Starts the component, then blocks until the user confirms they have placed
    attachment files into <directory>/attachments before loading them.
    """
    contract_attchment = Attachment(directory)
    contract_attchment.start()
    input(
        f"Please add attachment files to '{os.path.abspath(os.path.join(directory,'attachments'))}' and then press ENTER to continue"
    )
    contract_attchment.load()
    return contract_attchment
def load_contract(directory) -> Contract:
    """Load all previously generated contract components from *directory*."""
    components = {
        "token": Token(path=directory),
        "proof": Proof(path=directory),
        "meta": Meta(path=directory),
        "attachment": Attachment(path=directory),
        "network": Network(directory),
    }
    # dict preserves insertion order, so components load in the same sequence
    # as the original explicit calls: token, proof, meta, attachment, network.
    for component in components.values():
        component.load()
    return Contract(**components)
def init_network(
    directory,
    options: Options,
    targets: List[str],
):
    """Create the Network component and run each target's chain extension.

    For every target, dynamically imports ``cic.ext.<target>.start`` and calls
    its extension_start() with registry/chain/rpc settings from *options*.

    :param directory: Session directory.
    :param options: Deployment options (registry, chain spec, rpc, key account).
    :param targets: Chain targets to initialize (e.g. ["eth"]).
    """
    contract_network = Network(directory, targets=targets)
    contract_network.start()

    for target in targets:
        # Each target ships a start module responsible for on-chain bootstrap.
        m = importlib.import_module(f"cic.ext.{target}.start")
        m.extension_start(
            contract_network,
            registry_address=options.contract_registry,
            chain_spec=options.chain_spec,
            rpc_provider=options.rpc_provider,
            key_account_address=options.key_account,
        )
    contract_network.load()
    return contract_network
def generate(directory: str, target: str, options: Options) -> Contract:
    """Interactively generate a full contract deployment in *directory*.

    If the directory already exists the user is asked to confirm its deletion;
    declining aborts the program.

    :param directory: Target session directory.
    :param target: Chain extension target (e.g. "eth").
    :param options: Deployment options collected from config/user input.
    :return: The generated Contract aggregate.
    """
    import shutil  # local import: only needed on the overwrite path

    if os.path.exists(directory):
        proceed = input(
            "Directory already exists, Would you like to delete it? (y/n): "
        )
        if proceed.lower() != "y":
            print("Exiting")
            exit(1)
        else:
            # shutil.rmtree instead of shelling out to `rm -rf`: no shell
            # involvement (the path is user input), cross-platform, and it
            # raises on failure instead of silently returning a status code.
            shutil.rmtree(directory)
            print(f"Deleted {directory}")
    os.makedirs(directory)

    token = init_token(directory)
    proof = init_proof(directory)
    meta = init_meta(directory)
    attachment = init_attachment(directory)
    network = init_network(
        directory,
        options,
        targets=[target],
    )
    return Contract(
        token=token, proof=proof, meta=meta, attachment=attachment, network=network
    )
def get_options(config: Config, eargs) -> Options:
    """Collect deployment options, prompting interactively with config defaults.

    Each prompt falls back to the corresponding config value when the user
    presses ENTER. Auth/wallet settings are read from config without prompting.

    :param config: Loaded chainlib Config instance.
    :param eargs: Parsed command-line arguments (only ``y`` is consulted).
    :return: Populated Options.
    """
    # Defaults
    default_contract_registry = config.get(
        "CIC_REGISTRY_ADDRESS"
    )  # NOTE(review): originally provisioned from a developer-local
    # cic-staff-installer path — confirm how this is set in deployments
    default_key_account = config.get("AUTH_KEY")
    # https://meta.grassrootseconomics.net
    # https://auth.grassrootseconomics.net Authenticated Meta
    default_metadata_endpoint = config.get("META_URL")
    # Keyring folder needs to be dumped out as a private key file from $HOME/.config/cic/staff-client/.gnupg
    default_wallet_keyfile = eargs.y or config.get(
        "WALLET_KEY_FILE"
    )  # CLI -y takes precedence over the configured keyfile
    # NOTE(review): hardcoded fallback passphrase — should probably be prompted
    # for (or required) instead of defaulting; confirm
    default_wallet_passphrase = config.get("WALLET_PASSPHRASE", "merman")

    default_chain_spec = config.get("CHAIN_SPEC")
    default_rpc_provider = config.get("RPC_PROVIDER")

    # Interactive prompts; empty input keeps the default shown in parentheses.
    contract_registry = (
        input(f"Enter Contract Registry ({default_contract_registry}): ")
        or default_contract_registry
    )
    rpc_provider = (
        input(f"Enter RPC Provider ({default_rpc_provider}): ") or default_rpc_provider
    )
    chain_spec = ChainSpec.from_chain_str(
        (input(f"Enter ChainSpec ({default_chain_spec}): ") or default_chain_spec)
    )
    key_account = (
        input(f"Enter KeyAccount ({default_key_account}): ") or default_key_account
    )
    metadata_endpoint = (
        input(f"Enter Metadata Endpoint ({default_metadata_endpoint}): ")
        or default_metadata_endpoint
    )

    auth_passphrase = config.get("AUTH_PASSPHRASE")
    auth_keyfile_path = config.get("AUTH_KEYFILE_PATH")
    auth_db_path = config.get("AUTH_DB_PATH")

    options = Options(
        auth_db_path,
        auth_keyfile_path,
        auth_passphrase,
        contract_registry,
        key_account,
        chain_spec,
        rpc_provider,
        metadata_endpoint,
        default_wallet_keyfile,
        default_wallet_passphrase,
    )
    # NOTE(review): this prints the options including passphrase fields to
    # stdout — confirm that leaking secrets in terminal output is acceptable.
    print(options)
    return options
# Loose schema of the CLI arguments execute() expects on eargs; used only as
# an informal annotation below.
ExtraArgs = {"skip_gen": str, "skip_deploy": str, "target": str, "path": str, "p": str}


def execute(config, eargs: ExtraArgs):
    """Wizard entry point: generate (or load) a contract, then optionally deploy.

    :param config: Loaded chainlib config.
    :param eargs: Parsed CLI arguments matching ExtraArgs.
    """
    print(f"eargs: {eargs}")
    directory = eargs.path
    target = eargs.target
    skip_gen = eargs.skip_gen
    skip_deploy = eargs.skip_deploy

    options = get_options(config, eargs)

    if not skip_gen:
        contract = generate(directory, target, options)
    else:
        contract = load_contract(directory)

    # NOTE(review): print_contract is neither defined nor imported in this
    # module — this looks like it would raise NameError at runtime; confirm
    # whether it should be print(contract) or an import is missing.
    print_contract(contract)

    if not skip_deploy:
        ready_to_deploy = input("Ready to deploy? (y/n): ")
        if ready_to_deploy == "y":
            deploy(
                config=config,
                contract_directory=directory,
                options=options,
                target=target,
            )
            print("Deployed")
        else:
            print("Not deploying")
if __name__ == "__main__":
    # Direct invocation is not supported; this module is loaded as a cic
    # subcommand (process_args/validate_args/execute).
    # execute()
    print("Not Implemented")

0
cic/contract/__init__.py Normal file
View File

View File

View File

@@ -1,9 +1,9 @@
# standard imports
import os
import logging
import os
# local imports
from .base import *
from cic.contract.base import Data, data_dir
logg = logging.getLogger(__name__)
@@ -14,38 +14,39 @@ class Attachment(Data):
:param path: Path to settings directory
:type path: str
:param writer: Writer interface receiving the output of the processor
:type writer: cic.output.OutputWriter
:type writer: cic.writers.OutputWriter
"""
def __init__(self, path='.', writer=None):
def __init__(self, path=".", writer=None, interactive=False):
super(Attachment, self).__init__()
self.contents = {}
self.path = path
self.writer = writer
self.attachment_path = os.path.join(self.path, 'attachments')
self.attachment_path = os.path.join(self.path, "attachments")
if interactive:
self.start()
input(
f"Please add attachment files to '{os.path.abspath(os.path.join(self.path,'attachments'))}' and then press ENTER to continue"
)
self.load()
def load(self):
"""Loads attachment data from settings.
"""
"""Loads attachment data from settings."""
for s in os.listdir(self.attachment_path):
fp = os.path.realpath(os.path.join(self.attachment_path, s))
f = open(fp, 'rb')
r = f.read()
f.close()
with open(fp, "rb") as f:
r = f.read()
z = self.hash(r).hex()
self.contents[z] = fp
logg.debug('loaded attachment file {} digest {}'.format(fp, z))
logg.debug(f"loaded attachment file {fp} digest {z}")
def start(self):
"""Initialize attachment settings from template.
"""
"""Initialize attachment settings from template."""
super(Attachment, self).start()
os.makedirs(self.attachment_path)
def get(self, k):
"""Get a single attachment by the sha256 hash of the content.
@@ -54,33 +55,28 @@ class Attachment(Data):
"""
return self.contents[k]
def asdict(self):
"""Output attachment state to dict
"""
"""Output attachment state to dict"""
return self.contents
def process(self, token_address=None, token_symbol=None, writer=None):
"""Serialize and publish attachments.
See cic.processor.Processor.process
See cic.processor.Processor.process
"""
if writer == None:
writer = self.writer
for k in self.contents.keys():
fp = os.path.join(self.attachment_path, self.contents[k])
f = open(fp, 'rb')
v = f.read()
f.close()
logg.debug('writing attachment {}'.format(k))
writer.write(k, v)
for key, value in self.contents.items():
fp = os.path.join(self.attachment_path, value)
with open(fp, "rb") as f:
data = f.read()
logg.debug(f"writing attachment {key}")
writer.write(key, data)
def __str__(self):
s = ''
for k in self.contents.keys():
s += '{} = {}\n'.format(k, self.contents[k]) #self.digests[i].hex(), self.contents[i])
s = ""
for key, value in self.contents.items():
s += f"{key} = {value}\n" # self.digests[i].hex(), self.contents[i])
return s

View File

@@ -0,0 +1,163 @@
from __future__ import annotations
# standard imports
import os
import json
import logging
import base64
from typing import TYPE_CHECKING
# external imports
from cic_types import MetadataPointer
from cic_types.processor import generate_metadata_pointer
from hexathon import strip_0x
# local imports
from cic.contract.base import Data, data_dir
from cic.writers import OutputWriter
from cic_types.ext.metadata import MetadataRequestsHandler
from cic.utils import object_to_str
logg = logging.getLogger(__name__)
class Meta(Data):
    """Serialize and publish metadata for token.

    The token metadata is any mutable data that is not part of the initial
    token proof, but published simultaneously as the token nonetheless.

    :param path: Path to settings directory
    :type path: str
    :param writer: Writer interface receiving the output of the processor
    :type writer: cic.writers.OutputWriter
    :param name: Token issuer name
    :type name: str
    :param location: Issuer location
    :type location: str
    :param country_code: Country code, defaults to "KE"
    :type country_code: str
    :param contact: Contact details as key/value pairs
    :type contact: dict
    :param interactive: Prompt the user for all metadata fields
    :type interactive: bool
    """

    def __init__(
        self,
        path=".",
        writer=None,
        name="",
        location="",
        country_code="KE",
        contact=None,
        interactive=False,
    ):
        super(Meta, self).__init__()
        self.name = name
        # None sentinel instead of a mutable dict default: a `contact={}`
        # default would be shared across all Meta instances.
        self.contact = {} if contact is None else contact
        self.country_code = country_code
        self.location = location
        self.path = path
        self.writer = writer
        self.meta_path = os.path.join(self.path, "meta.json")

        if interactive:
            self.name = input(f"Enter Metadata Name ({self.name}): ") or self.name
            self.country_code = (
                input(f"Enter Metadata Country Code ({self.country_code}): ")
                or self.country_code
            )
            self.location = (
                input(f"Enter Metadata Location ({self.location}): ") or self.location
            )
            # Collect contact entries until an empty line is submitted.
            adding_contact_info = True
            contact = {}
            while adding_contact_info:
                value = (
                    input("Enter Metadata contact info (e.g 'phone: +254723522718'): ")
                    or None
                )
                if value:
                    data = value.split(":")
                    if len(data) != 2:
                        print(
                            "Invalid contact info, you must enter in the format 'key: value'"
                        )
                        continue
                    contact[data[0].strip()] = data[1].strip()
                else:
                    adding_contact_info = False
            self.contact = contact

    def load(self):
        """Load metadata from settings."""
        super(Meta, self).load()

        # Context manager so the settings file is closed even if json fails.
        with open(self.meta_path, "r", encoding="utf-8") as f:
            o = json.load(f)

        self.name = o["name"]
        self.contact = o["contact"]
        self.country_code = o["country_code"]
        self.location = o["location"]
        self.inited = True

    def start(self):
        """Initialize metadata settings from template."""
        super(Meta, self).start()

        meta_template_file_path = os.path.join(
            data_dir, f"meta_template_v{self.version()}.json"
        )

        with open(meta_template_file_path, encoding="utf-8") as f:
            o = json.load(f)

        o["name"] = self.name
        o["contact"] = self.contact
        o["country_code"] = self.country_code
        o["location"] = self.location

        with open(self.meta_path, "w", encoding="utf-8") as f:
            json.dump(o, f, sort_keys=True, indent="\t")

    def reference(self, token_address):
        """Calculate the mutable reference for the token metadata."""
        token_address_bytes = bytes.fromhex(strip_0x(token_address))
        return generate_metadata_pointer(
            token_address_bytes, MetadataPointer.TOKEN_META
        )

    def asdict(self):
        """Output metadata state to dict."""
        return {
            "name": self.name,
            "country_code": self.country_code,
            "location": self.location,
            "contact": self.contact,
        }

    def process(self, token_address=None, token_symbol=None, writer=None):
        """Serialize and publish metadata.

        See cic.processor.Processor.process
        """
        if writer is None:
            writer = self.writer

        v = json.dumps(self.asdict(), separators=(",", ":"))

        # Publish the same payload under the token-address pointer...
        token_address_bytes = bytes.fromhex(strip_0x(token_address))
        k = generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_META)
        writer.write(k, v.encode("utf-8"))

        # ...and under the token-symbol pointer.
        token_symbol_bytes = token_symbol.encode("utf-8")
        k = generate_metadata_pointer(
            token_symbol_bytes, MetadataPointer.TOKEN_META_SYMBOL
        )
        writer.write(k, v.encode("utf-8"))

        return (k, v)

    def __str__(self):
        return object_to_str(self, ["name", "contact", "country_code", "location"])
class MetadataWriter(OutputWriter):
    """Custom writer for publishing data under immutable content-addressed pointers in the cic-meta storage backend.

    Data that is not utf-8 will be converted to base64 before publishing.

    Implements cic.writers.OutputWriter
    """

    def write(self, k, v):
        # k: hex-encoded metadata pointer; v: raw payload bytes.
        rq = MetadataRequestsHandler(MetadataPointer.NONE, bytes.fromhex(k))
        try:
            # Assume a utf-8 JSON payload first.
            v = v.decode("utf-8")
            # NOTE(review): utf-8 payloads that are NOT valid JSON raise
            # json.JSONDecodeError here (not caught below) — confirm intended.
            v = json.loads(v)
            logg.debug(f"metadatawriter bindecode {k} {v}")
        except UnicodeDecodeError:
            # Binary payloads are published as base64 strings instead.
            v = base64.b64encode(v).decode("utf-8")
            # NOTE(review): dumps-then-loads of a plain string is an identity
            # round trip — presumably left over from an earlier format; confirm.
            v = json.loads(json.dumps(v, separators=(",", ":")))
            logg.debug(f"metadatawriter b64encode {k} {v}")
        r = rq.create(v)
        logg.info(f"metadata submitted at {k}")
        return r

View File

@@ -7,10 +7,7 @@ import logging
from chainlib.chain import ChainSpec
# local imports
from .base import (
Data,
data_dir,
)
from cic.contract.components.base import Data, data_dir
logg = logging.getLogger(__name__)
@@ -54,7 +51,7 @@ class Network(Data):
"""
super(Network, self).load()
network_template_file_path = os.path.join(data_dir, 'network_template_v{}.json'.format(self.version()))
network_template_file_path = os.path.join(data_dir, f'network_template_v{self.version()}.json')
f = open(network_template_file_path)
o_part = json.load(f)
@@ -138,11 +135,11 @@ class Network(Data):
def __str__(self):
s = ''
for k in self.resources.keys():
for kk in self.resources[k]['contents'].keys():
v = self.resources[k]['contents'][kk]
if v == None:
v = ''
s += '{}.{} = {}\n'.format(k, kk, v)
for resource in self.resources.keys():
for content_key in self.resources[resource]['contents'].keys():
content_value = self.resources[resource]['contents'][content_key]
if content_value == None:
content_value = ''
s += f'{resource}.{content_key} = {content_value}\n'
return s

View File

@@ -0,0 +1,192 @@
# standard imports
import json
import logging
import os
import tempfile
# external imports
from hexathon import strip_0x
from cic_types import MetadataPointer
from cic_types.processor import generate_metadata_pointer
# local imports
from cic.contract.base import Data, data_dir
from cic.utils import object_to_str
logg = logging.getLogger(__name__)
class Proof(Data):
    """Proof handles the immutable token proof data mapped to the initial token deployment.

    It processes inputs from the proof.json file in the session directory.

    Optionally, attachment objects can be added to the proof. If added, the resulting proof digest will consist of the attachment digests added to the root digest. These are then deterministically ordered, regardless of which order attachments were given to the constructor.

    :param path: Path to settings directory
    :type path: str
    :param attachments: List of attachment objects to include in the proof
    :type attachments: cic.attachment.Attachment
    :param writer: Writer interface receiving the output of the processor
    :type writer: cic.writers.OutputWriter
    """

    def __init__(
        self,
        path=".",
        description="",
        namespace="ge",
        issuer="",
        attachments=None,
        writer=None,
        interactive=False,
    ):
        super(Proof, self).__init__()
        self.proofs = []
        self.namespace = namespace
        self.description = description
        self.issuer = issuer
        self.path = path
        self.writer = writer
        self.extra_attachments = attachments
        self.attachments = {}
        self.proof_path = os.path.join(self.path, "proof.json")
        # mkstemp returns an open OS-level file descriptor alongside the path;
        # close it immediately so the descriptor is not leaked.
        fd, self.temp_proof_path = tempfile.mkstemp()
        os.close(fd)

        if interactive:
            self.description = (
                input(f"Enter Proof Description ({self.description}): ")
                or self.description
            )
            self.namespace = (
                input(f"Enter Proof Namespace ({self.namespace}): ") or self.namespace
            )
            self.issuer = input(f"Enter Proof Issuer ({self.issuer}): ") or self.issuer

    def load(self):
        """Load proof data from settings."""
        super(Proof, self).load()

        with open(self.proof_path, "r", encoding="utf-8") as f:
            o = json.load(f)

        self.set_version(o["version"])
        self.description = o["description"]
        self.namespace = o["namespace"]
        self.issuer = o["issuer"]
        self.proofs = o["proofs"]

        if self.extra_attachments is not None:
            a = self.extra_attachments.asdict()
            for k in a.keys():
                self.attachments[k] = a[k]

        # Proof digests are the attachment digests in deterministic order.
        hshs = self.__get_ordered_hashes()
        self.proofs = list(map(strip_0x, hshs))

        self.inited = True

    def start(self):
        """Initialize proof settings from template."""
        super(Proof, self).start()

        proof_template_file_path = os.path.join(
            data_dir, f"proof_template_v{self.version()}.json"
        )

        with open(proof_template_file_path, "r", encoding="utf-8") as f:
            o = json.load(f)

        o["issuer"] = self.issuer
        o["description"] = self.description
        o["namespace"] = self.namespace

        with open(self.proof_path, "w", encoding="utf-8") as f:
            json.dump(o, f, sort_keys=True, indent="\t")

    def asdict(self):
        """Output proof state to dict."""
        return {
            "version": self.version(),
            "namespace": self.namespace,
            "description": self.description,
            "issuer": self.issuer,
            "proofs": self.proofs,
        }

    # TODO: the efficiency of this method could probably be improved.
    def __get_ordered_hashes(self):
        ks = list(self.attachments.keys())
        ks.sort()
        return ks

    def root(self):
        """Calculate the root digest from the serialized proof object.

        Also persists the serialized form to the temp proof file.

        :return: (hex digest, serialized proof bytes)
        """
        v = self.asdict()
        b = json.dumps(v, separators=(",", ":"))

        with open(self.temp_proof_path, "w", encoding="utf-8") as f:
            f.write(b)

        b = b.encode("utf-8")
        k = self.hash(b)

        return (k.hex(), b)

    def process(self, token_address=None, token_symbol=None, writer=None):
        """Serialize and publish proof.

        See cic.processor.Processor.process
        """
        if writer is None:
            writer = self.writer

        (k, v) = self.root()
        writer.write(k, v)
        root_key = k

        # The same proof is published under the symbol and address pointers.
        token_symbol_bytes = token_symbol.encode("utf-8")
        k = generate_metadata_pointer(
            token_symbol_bytes, MetadataPointer.TOKEN_PROOF_SYMBOL
        )
        writer.write(k, v)

        token_address_bytes = bytes.fromhex(strip_0x(token_address))
        k = generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_PROOF)
        writer.write(k, v)

        o = self.asdict()
        with open(self.proof_path, "w", encoding="utf-8") as f:
            json.dump(o, f, sort_keys=True, indent="\t")

        return root_key

    def __str__(self):
        return object_to_str(
            self, ["description", "issuer", "namespace", "version()", "proofs"]
        )

View File

@@ -0,0 +1,123 @@
# standard imports
import json
import os
# local imports
from cic.contract.base import Data, data_dir
from cic.contract.helpers import select_contract
class Token(Data):
"""Encapsulates the token data used by the extension to deploy and/or register token and token related applications on chain.
Token details (name, symbol etc) will be used to initialize the token settings when start is called. If load is called instead, any token detail parameters passed to the constructor will be overwritten by data stored in the settings.
:param path: Settings directory path
:type path: str
:param name: Token name
:type name: str
:param symbol: Token symbol
:type symbol: str
:param precision: Token value precision (number of decimals)
:type precision: int
:param supply: Token supply (in smallest precision units)
:type supply: int
:param code: Bytecode for token chain application
:type code: str (hex)
"""
def __init__(
self,
path=".",
name="Foo Token",
symbol="FOO",
precision=6,
supply=0,
code=None,
extra_args=[],
extra_args_types=[],
interactive=False,
):
super(Token, self).__init__()
self.name = name
self.symbol = symbol
self.supply = supply
self.precision = precision
self.code = code
self.extra_args = extra_args
self.extra_args_types = extra_args_types
self.path = path
self.token_path = os.path.join(self.path, "token.json")
if interactive:
contract = select_contract()
self.code = contract["bin_path"]
self.extra_args = contract["extra_args"]
self.extra_args_types = contract["extra_args_types"]
self.name = input(f"Enter Token Name ({self.name}): ") or self.name
self.symbol = input(f"Enter Token Symbol ({self.symbol}): ") or self.symbol
self.precision = input(f"Enter Token Precision ({self.precision}): ") or self.precision
self.supply = input(f"Enter Token Supply ({self.supply}): ") or self.supply
def load(self):
"""Load token data from settings."""
super(Token, self).load()
with open(self.token_path, "r", encoding="utf-8") as f:
o = json.load(f)
self.name = o["name"]
self.symbol = o["symbol"]
self.precision = o["precision"]
self.code = o["code"]
self.supply = o["supply"]
extras = []
extra_types = []
token_extras: list = o["extra"]
if token_extras:
for idx, token_extra in enumerate(token_extras):
arg = token_extra.get("arg")
arg_type = token_extra.get("arg_type")
if arg and arg_type:
extras.append(arg)
extra_types.append(arg_type)
elif (arg and not arg_type) or (not arg and arg_type):
raise ValueError(
f"Extra contract args must have a 'arg' and 'arg_type', Please check {self.token_path}:extra[{idx}] "
)
self.extra_args = extras
self.extra_args_types = extra_types
self.inited = True
def start(self):
    """Seed token.json from the bundled version template and the values
    passed to the constructor."""
    super(Token, self).load()
    template_path = os.path.join(
        data_dir, f"token_template_v{self.version()}.json"
    )
    with open(template_path, encoding="utf-8") as template_file:
        settings = json.load(template_file)
    settings["name"] = self.name
    settings["symbol"] = self.symbol
    settings["precision"] = self.precision
    settings["code"] = self.code
    settings["supply"] = self.supply
    extra = [
        {"arg": value, "arg_type": self.extra_args_types[idx]}
        for idx, value in enumerate(self.extra_args)
    ]
    # Only override the template's "extra" placeholder when we actually
    # have extra constructor arguments.
    if extra:
        settings["extra"] = extra
    with open(self.token_path, "w", encoding="utf-8") as out_file:
        json.dump(settings, out_file, sort_keys=True, indent="\t")
def __str__(self):
    """Render token settings as newline-terminated key = value lines."""
    lines = [
        f"name = {self.name}",
        f"symbol = {self.symbol}",
        f"precision = {self.precision}",
        f"supply = {self.supply}",
    ]
    for idx, extra in enumerate(self.extra_args):
        lines.append(f"extra_args[{idx}]({self.extra_args_types[idx]}) = {extra}")
    # Trailing "\n" matches the original line-by-line concatenation.
    return "\n".join(lines) + "\n"

191
cic/contract/contract.py Normal file
View File

@@ -0,0 +1,191 @@
# Standard
import importlib
import json
import logging
import os
from typing import List, TYPE_CHECKING
import requests
# external imports
from cic_types.ext.metadata import MetadataRequestsHandler
from cic_types.ext.metadata.signer import Signer as MetadataSigner
from chainlib.cli.config import Config
# Local Modules
from cic.contract import ContractProcessor
from cic.contract.components.attachment import Attachment
from cic.contract.components.meta import Meta
from cic.contract.components.network import Network
from cic.contract.components.proof import Proof
from cic.contract.components.token import Token
from cic.contract.helpers import init_writers_from_config
from cic.writers import HTTPWriter, KeyedWriterFactory, OutputWriter
log = logging.getLogger(__name__)
class Contract:
    """Aggregate of all components that make up a token contract:
    token settings, proof, metadata, attachments and network resources."""

    def __init__(
        self,
        token: Token,
        proof: Proof,
        meta: Meta,
        attachment: Attachment,
        network: Network,
    ):
        self.token = token
        self.proof = proof
        self.meta = meta
        self.attachment = attachment
        self.network = network

    def __str__(self):
        """Render every component under a [cic.*] section header."""
        sections = [
            f"[cic.header]\nversion = {self.proof.version()}\n",
            f"[cic.token]\n{self.token}",
            f"[cic.proof]\n{self.proof}",
            f"[cic.meta]\n{self.meta}",
            f"[cic.attachment]\n{self.attachment}",
            f"[cic.network]\n{self.network}",
        ]
        return "".join(sections)
def load_contract(directory) -> Contract:
    """Load every persisted contract component from *directory* and bundle
    them into a Contract.

    :param directory: Directory holding the serialized component files
    :return: Contract aggregating the loaded components
    """
    components = {
        "token": Token(path=directory),
        "proof": Proof(path=directory),
        "meta": Meta(path=directory),
        "attachment": Attachment(path=directory),
        "network": Network(directory),
    }
    for component in components.values():
        component.load()
    return Contract(**components)
def generate_contract(
    directory: str, targets: List[str], config, interactive=True
) -> Contract:
    """Generate a fresh contract skeleton in *directory* for the given targets.

    Prompts before wiping an existing directory, initializes each component
    (token, proof, meta, attachment, network) and runs every target's
    extension start hook to populate network resources.

    :param directory: Output directory for the generated contract files
    :param targets: Extension target names (e.g. ["eth"])
    :param config: Configuration object implementing get()
    :param interactive: Prompt the user for component values when True
    :return: Contract aggregating all initialized components
    """
    import shutil  # stdlib; replaces shelling out for directory removal

    if os.path.exists(directory):
        contine = input(
            "Directory already exists, Would you like to delete it? (y/n): "
        )
        if contine.lower() != "y":
            print("Exiting")
            raise SystemExit(1)
        # shutil.rmtree instead of os.system(f"rm -rf {directory}"): no shell
        # quoting/injection hazard, portable, and failures raise instead of
        # being silently ignored. Message now printed after deletion succeeds.
        shutil.rmtree(directory)
        print(f"Deleted {directory}")
    os.makedirs(directory)

    token = Token(directory, interactive=interactive)
    token.start()
    proof = Proof(directory, interactive=interactive)
    proof.start()
    meta = Meta(directory, interactive=interactive)
    meta.start()
    # NOTE(review): attachment gets no start() call, unlike its siblings —
    # confirm Attachment initializes itself in the constructor.
    attachment = Attachment(directory, interactive=interactive)
    network = Network(directory, targets=targets)
    network.start()

    for target in targets:
        m = importlib.import_module(f"cic.ext.{target}.start")
        m.extension_start(
            network,
            registry_address=config.get("CIC_REGISTRY_ADDRESS"),
            chain_spec=config.get("CHAIN_SPEC"),
            rpc_provider=config.get("RPC_PROVIDER"),
            # NOTE(review): key_account_address is fed the RPC_PROVIDER value;
            # looks like a copy-paste slip — confirm the intended config key.
            key_account_address=config.get("RPC_PROVIDER"),
        )
    network.load()
    return Contract(
        token=token, proof=proof, meta=meta, attachment=attachment, network=network
    )
def deploy(
    config: Config,
    target: str,
    contract_directory: str,
):
    """Publish the contract stored in *contract_directory* to *target*.

    Loads every component (token, meta, attachment, proof, network) from
    disk, wires up the configured output writers — switching meta, proof and
    attachment output to the remote metadata endpoint when META_URL is set —
    then hands everything to a ContractProcessor for serialization and
    publishing.

    :param config: Configuration object implementing get()
    :param target: Extension target name (e.g. "eth"); resolves cic.ext.<target>
    :param contract_directory: Directory holding the generated contract files
    """
    # Target-specific extension module, e.g. cic.ext.eth.
    modname = f"cic.ext.{target}"
    cmd_mod = importlib.import_module(modname)

    writers = init_writers_from_config(config)
    output_directory = os.path.join(contract_directory, "out")
    output_writer_path_meta = output_directory

    metadata_endpoint = config.get("META_URL")
    if metadata_endpoint is not None:
        # Remote publishing: configure the metadata signer and swap the
        # file-based writers for HTTP/metadata writers aimed at the endpoint.
        MetadataRequestsHandler.base_url = metadata_endpoint
        MetadataSigner.gpg_path = "/tmp"
        MetadataSigner.key_file_path = config.get("AUTH_KEYFILE")
        MetadataSigner.gpg_passphrase = config.get("AUTH_PASSPHRASE")
        writers["proof"] = KeyedWriterFactory(MetadataWriter, HTTPWriter).new
        writers["attachment"] = KeyedWriterFactory(None, HTTPWriter).new
        writers["meta"] = MetadataWriter
        output_writer_path_meta = metadata_endpoint

    ct = Token(path=contract_directory)
    cm = Meta(
        path=contract_directory, writer=writers["meta"](path=output_writer_path_meta)
    )
    ca = Attachment(
        path=contract_directory,
        writer=writers["attachment"](path=output_writer_path_meta),
    )
    cp = Proof(
        path=contract_directory,
        attachments=ca,
        writer=writers["proof"](path=output_writer_path_meta),
    )
    cn = Network(path=contract_directory)

    # Attachments are loaded first; the proof references them.
    ca.load()
    ct.load()
    cp.load()
    cm.load()
    cn.load()

    chain_spec = None
    try:
        chain_spec = config.get("CHAIN_SPEC")
        log.debug(f"using CHAIN_SPEC from config: {chain_spec}")
    except KeyError:
        # NOTE(review): assumes config.get raises KeyError on a missing key —
        # confirm against the Config implementation.
        chain_spec = cn.chain_spec
        config.add(chain_spec, "CHAIN_SPEC", exists_ok=True)
        log.debug(f"using CHAIN_SPEC: {str(chain_spec)} from network")

    signer_hint = config.get("WALLET_KEY_FILE")
    (rpc, signer) = cmd_mod.parse_adapter(config, signer_hint)

    target_network_reference = cn.resource(target)
    # NOTE(review): this overwrites the chain_spec resolved from config above
    # with the per-target network value — confirm that is intentional.
    chain_spec = cn.chain_spec(target)
    log.debug(
        f'found reference {target_network_reference["contents"]} chain spec {chain_spec} for target {target}'
    )
    c = getattr(cmd_mod, "new")(
        chain_spec,
        target_network_reference["contents"],
        cp,
        signer_hint=signer,
        rpc=rpc,
        outputs_writer=writers["ext"](path=output_directory),
    )
    c.apply_token(ct)

    p = ContractProcessor(proof=cp, attachment=ca, metadata=cm, extensions=[c])
    p.process()

120
cic/contract/helpers.py Normal file
View File

@@ -0,0 +1,120 @@
# standard imports
import os
import logging
import sys
import json
import requests
# local imports
from cic.writers import OutputWriter
log = logging.getLogger(__name__)
# Known token contract templates offered by the interactive selector; each
# "url" is the basename of a pair of artifacts (<url>.bin bytecode and
# <url>.json ABI) hosted on GitLab.
CONTRACTS = [
    {
        "url": "https://gitlab.com/cicnet/eth-erc20/-/raw/master/python/giftable_erc20_token/data/GiftableToken",
        "name": "Giftable Token",
    },
    {
        "url": "https://gitlab.com/cicnet/erc20-demurrage-token/-/raw/master/python/erc20_demurrage_token/data/DemurrageTokenSingleNocap",
        "name": "Demurrage Token Single No Cap",
    },
]
# Download File from Url
def download_file(url: str, directory: str, filename=None) -> str:
    """Download *url* into *directory* and return the local file path.

    Acts as a cache: when the target file already exists, no request is made.

    :param url: Source URL
    :param directory: Local directory (created if missing)
    :param filename: Local file name; defaults to the last URL path segment
    :return: Path to the local copy
    """
    os.makedirs(directory, exist_ok=True)
    filename = filename if filename else url.split("/")[-1]
    path = os.path.join(directory, filename)
    if not os.path.exists(path):
        log.debug("Downloading %s", filename)
        # NOTE(review): no timeout is set on the request; consider adding one.
        r = requests.get(url, allow_redirects=True)
        # Context manager closes the file handle (the original leaked it).
        with open(path, "wb") as f:
            f.write(r.content)
    return path
def get_contract_args(data: list):
    """Return the constructor input spec from a contract ABI.

    :param data: Parsed ABI (list of entry dicts)
    :return: The "inputs" list of the constructor entry
    :raises ValueError: when the ABI contains no constructor entry
    """
    for item in data:
        if item["type"] == "constructor":
            return item["inputs"]
    # ValueError instead of bare Exception; still an Exception subclass, so
    # existing broad handlers keep working.
    raise ValueError("No constructor found in contract")
def select_contract():
    """Interactively choose a token contract.

    Offers the known CONTRACTS plus a custom path/url, fetches the bytecode
    (and ABI json when one can be located) and prompts for constructor
    arguments not covered by the standard token fields.

    :return: dict with bin_path, json_path, extra_args, extra_args_types
    """
    print("Contracts:")
    print("\t C - Custom (path/url to contract)")
    for idx, contract in enumerate(CONTRACTS):
        print(f"\t {idx} - {contract['name']}")

    val = input("Select contract (C,0,1..): ")
    json_path = ""  # stays empty when no ABI json can be located
    if val.isdigit() and int(val) < len(CONTRACTS):
        contract = CONTRACTS[int(val)]
        directory = f"./contracts/{contract['name']}"
        bin_path = os.path.abspath(download_file(contract["url"] + ".bin", directory))
        json_path = download_file(contract["url"] + ".json", directory)
    elif val == "C":
        possible_bin_location = input("Enter a path or url to a contract.bin: ")
        if possible_bin_location.startswith("http"):
            # possible_bin_location is a url.
            # Fix: the original referenced `directory` here, which was never
            # assigned on this branch (NameError).
            bin_path = download_file(possible_bin_location, "./contracts/custom")
        else:
            # possible_bin_location is a filesystem path
            if os.path.exists(possible_bin_location):
                bin_path = os.path.abspath(possible_bin_location)
            else:
                raise Exception(f"File {possible_bin_location} does not exist")
        # Fix: the original derived this from `val` (which is "C" on this
        # branch) instead of the entered location.
        possible_json_path = possible_bin_location.replace(".bin", ".json")
        if os.path.exists(possible_json_path):
            json_path = possible_json_path
    else:
        print("Invalid selection")
        sys.exit(1)

    contract_extra_args = []
    contract_extra_args_types = []
    if json_path and os.path.exists(json_path):
        with open(json_path, encoding="utf-8") as f:
            json_data = json.load(f)
        for contract_arg in get_contract_args(json_data):
            arg_name = contract_arg.get("name")
            arg_type = contract_arg.get("type")
            if arg_name not in ["_decimals", "_name", "_symbol"]:
                val = input(f"Enter value for {arg_name} ({arg_type}): ")
                contract_extra_args.append(val)
                # NOTE(review): uint128 is widened to uint256 — confirm this
                # matches what the ABI encoder expects.
                if arg_type == "uint128":
                    contract_extra_args_types.append("uint256")
                else:
                    contract_extra_args_types.append(arg_type)
    return {
        "bin_path": bin_path,
        "json_path": json_path,
        "extra_args": contract_extra_args,
        "extra_args_types": contract_extra_args_types,
    }
# Default writer interface spec: maps each output kind to the writer class
# used to publish it; init_writers_from_config replaces these with the
# classes named in configuration.
Writers = {
    "meta": OutputWriter,
    "attachment": OutputWriter,
    "proof": OutputWriter,
    "ext": OutputWriter,
}
def init_writers_from_config(config) -> "Writers":
    """Resolve the output writer classes named in configuration.

    Reads CIC_CORE_{META,ATTACHMENT,PROOF,EXT}_WRITER, each a dotted path
    "module.Class", imports the module and resolves the attribute.

    :param config: Configuration object implementing get()
    :return: mapping of writer kind ("meta", "attachment", "proof", "ext")
        to the resolved writer class
    """
    # Fix: importlib is not among this module's imports; bind it locally.
    import importlib

    writers = {
        "meta": None,
        "attachment": None,
        "proof": None,
        "ext": None,
    }
    for key in writers:
        writer_config_name = f"CIC_CORE_{key.upper()}_WRITER"
        (module_name, attribute_name) = config.get(writer_config_name).rsplit(
            ".", maxsplit=1
        )
        mod = importlib.import_module(module_name)
        writers[key] = getattr(mod, attribute_name)
    return writers

View File

@@ -4,7 +4,7 @@ import logging
logg = logging.getLogger(__name__)
class Processor:
class ContractProcessor:
"""Drives the serialization and publishing of contracts, proofs and metadata for the token.
:param proof: Proof object to publish
@@ -14,31 +14,37 @@ class Processor:
:param metadata: Metadata object to publish
:type metadata: cic.meta.Meta
:param writer: Writer interface receiving the output of the processor
:type writer: cic.output.OutputWriter
:type writer: cic.writers.OutputWriter
:param extensions: Extension contexts to publish to
:type extensions: list of cic.extension.Extension
"""
def __init__(self, proof=None, attachment=None, metadata=None, outputs_writer=None, extensions=[]):
def __init__(
self,
proof=None,
attachment=None,
metadata=None,
outputs_writer=None,
extensions=[],
):
self.token_address = None
self.extensions = extensions
self.cores = {
'metadata': metadata,
'attachment': attachment,
'proof': proof,
}
"metadata": metadata,
"attachment": attachment,
"proof": proof,
}
self.outputs = []
self.__outputs_writer = outputs_writer
def writer(self):
"""Return the writer instance that the process is using.
:rtype: cic.output.OutputWriter
:rtype: cic.writers.OutputWriter
:return: Writer
"""
return self.__outputs_writer
def get_outputs(self):
"""Return all written outputs.
@@ -53,7 +59,6 @@ class Processor:
outputs += self.outputs
return outputs
def process(self, writer=None):
"""Serializes and publishes all token data.
@@ -62,22 +67,26 @@ class Processor:
All output written to the publish writer will also be cached so that it subsequently be recalled using the get_outputs method.
:param writer: Writer to use for publishing.
:type writer: cic.output.OutputWriter
:type writer: cic.writers.OutputWriter
"""
tasks = [
'attachment',
'proof',
'metadata',
]
"attachment",
"proof",
"metadata",
]
for ext in self.extensions:
(token_address, token_symbol) = ext.process()
for task in tasks:
a = self.cores.get(task)
if a == None:
logg.debug('skipping missing task receiver "{}"'.format(task))
if a is None:
logg.debug(f'skipping missing task receiver "{task}"')
continue
v = a.process(token_address=token_address, token_symbol=token_symbol, writer=self.__outputs_writer)
v = a.process(
token_address=token_address,
token_symbol=token_symbol,
writer=self.__outputs_writer,
)
self.outputs.append(v)

View File

@@ -1,5 +1,20 @@
[cic_core]
meta_writer = cic.output.KVWriter
attachment_writer = cic.output.KVWriter
proof_writer = cic.output.KVWriter
ext_writer = cic.output.KVWriter
meta_writer = cic.writers.KVWriter
attachment_writer = cic.writers.KVWriter
proof_writer = cic.writers.KVWriter
ext_writer = cic.writers.KVWriter
[cic]
registry_address = 0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299
[meta]
url = http://localhost:63380
http_origin =
[auth]
type = gnupg
db_path = /home/will/.local/share/cic/clicada
keyfile_path = /home/will/grassroots/cic-internal-integration/apps/cic-ussd/tests/data/pgp/privatekeys_meta.asc
key = CCE2E1D2D0E36ADE0405E2D0995BB21816313BD5
passphrase = merman

View File

@@ -4,5 +4,10 @@
"precision": 0,
"code": null,
"supply": 0,
"extra": {}
"extra": [
{
"arg": "",
"arg_type": ""
}
]
}

View File

@@ -1,43 +1,41 @@
# standard imports
import logging
import copy
import json
import logging
# external imports
from chainlib.chain import ChainSpec
from chainlib.eth.tx import (
TxFormat,
TxFactory,
Tx,
receipt,
)
from chainlib.eth.address import is_address, to_checksum_address
from chainlib.eth.connection import RPCConnection
from chainlib.eth.contract import (
ABIContractEncoder,
ABIContractType
)
from chainlib.eth.contract import ABIContractEncoder, ABIContractType
from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.address import (
is_address,
to_checksum_address,
)
from hexathon import add_0x
from eth_token_index import TokenUniqueSymbolIndex
from chainlib.eth.tx import Tx, TxFactory, TxFormat, receipt
from eth_address_declarator import Declarator
from eth_address_declarator.declarator import AddressDeclarator
from eth_token_index import TokenUniqueSymbolIndex
from giftable_erc20_token import GiftableToken
from hexathon import add_0x, strip_0x
# local imports
from cic.ext.eth.rpc import parse_adapter
from cic.extension import Extension
logg = logging.getLogger(__name__)
class CICEth(Extension):
def __init__(self, chain_spec, resources, proof, signer=None, rpc=None, outputs_writer=None, fee_oracle=None):
def __init__(
self,
chain_spec,
resources,
proof,
signer=None,
rpc=None,
outputs_writer=None,
fee_oracle=None,
):
"""Implementation for the eth extension.
@@ -54,19 +52,25 @@ class CICEth(Extension):
:param rpc: RPC adapter capable of submitting and querying the chain network node
:type rpc: chainlib.connection.RPCConnection
:param outputs_writer: Writer interface receiving the output of the processor
:type outputs_writer: cic.output.OutputWriter
:type outputs_writer: cic.writers.OutputWriter
:param fee_oracle: Fee oracle required by signer
:type fee_oracle: chainlib.fee.FeeOracle
"""
super(CICEth, self).__init__(chain_spec, resources, proof, signer=signer, rpc=rpc, outputs_writer=outputs_writer)
super(CICEth, self).__init__(
chain_spec,
resources,
proof,
signer=signer,
rpc=rpc,
outputs_writer=outputs_writer,
)
self.fee_oracle = fee_oracle
self.tx_format = TxFormat.RAW_ARGS
if self.rpc != None:
if self.rpc is not None:
self.tx_format = TxFormat.JSONRPC
elif self.signer != None:
elif self.signer is not None:
self.tx_format = TxFormat.RLP_SIGNED
def __detect_arg_type(self, v):
typ = None
try:
@@ -74,59 +78,59 @@ class CICEth(Extension):
typ = ABIContractType.UINT256
except TypeError:
pass
if typ == None:
if typ is None:
try:
vv = strip_0x(v)
if is_address(vv):
typ = ABIContractType.ADDRESS
typ = ABIContractType.ADDRESS
else:
typ = ABIContractType.BYTES32
typ = ABIContractType.BYTES32
except ValueError:
pass
if typ == None:
if typ is None:
try:
v.encode('utf-8')
typ = ABIContractType.STRING
v.encode("utf-8")
typ = ABIContractType.STRING
except ValueError:
pass
if typ == None:
raise ValueError('cannot automatically determine type for value {}'.format(v))
if typ is None:
raise ValueError(
f"cannot automatically determine type for value {v}"
)
logg.info('argument {} parsed as abi contract type {}'.format(typ.value))
logg.info(f"argument {v} parsed as abi contract type {typ.value}")
return typ
def __order_args(self):
args = [
self.token_details['name'],
self.token_details['symbol'],
self.token_details['precision'],
]
self.token_details["name"],
self.token_details["symbol"],
self.token_details["precision"],
]
args_types = [
ABIContractType.STRING.value,
ABIContractType.STRING.value,
ABIContractType.UINT256.value,
]
ABIContractType.STRING.value,
ABIContractType.STRING.value,
ABIContractType.UINT256.value,
]
for i, x in enumerate(self.token_details['extra']):
for i, x in enumerate(self.token_details["extra"]):
args.append(x)
typ = None
if self.token_details['extra_types'] != None:
typ = self.token_details['extra_types'][i]
if self.token_details["extra_types"] is not None:
typ = self.token_details["extra_types"][i]
else:
typ = self.__detect_arg_type(x)
args_types.append(typ)
positions = self.token_details['positions']
if positions == None:
positions = self.token_details["positions"]
if positions is None:
positions = list(range(len(args)))
return (args, args_types, positions)
return (args, args_types, positions)
def add_outputs(self, k, v):
"""Adds given key/value pair to outputs array.
@@ -136,10 +140,9 @@ class CICEth(Extension):
:param v: Output value
:param v: bytes or str
"""
logg.debug('adding outputs {} {}'.format(k, v))
logg.debug(f"adding outputs {k} {v}")
self.outputs.append((k, v))
def get_outputs(self):
"""Get wrapper for outputs captured from processing.
@@ -148,14 +151,13 @@ class CICEth(Extension):
"""
return self.outputs
def process_token(self, writer=None):
"""Deploy token, and optionally mint token supply to token deployer account.
:param writer: Writer interface receiving the output of the processor step
:type writer: cic.output.OutputWriter
:type writer: cic.writers.OutputWriter
"""
if writer == None:
if writer is None:
writer = self.outputs_writer
(args, args_types, positions) = self.__order_args()
@@ -163,143 +165,189 @@ class CICEth(Extension):
enc = ABIContractEncoder()
for i in positions:
getattr(enc, args_types[i])(args[i])
getattr(enc, args_types[i])(args[i])
code = enc.get()
if self.token_code != None:
if self.token_code is not None:
code = self.token_code + code
logg.debug('resource {}'.format(self.resources))
signer_address = add_0x(to_checksum_address(self.resources['token']['key_account']))
logg.debug(f"resource {self.resources}")
signer_address = add_0x(
to_checksum_address(self.resources["token"]["key_account"])
)
nonce_oracle = None
if self.rpc != None:
if self.rpc is not None:
nonce_oracle = RPCNonceOracle(signer_address, conn=self.rpc)
c = TxFactory(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=self.fee_oracle)
c = TxFactory(
self.chain_spec,
signer=self.signer,
nonce_oracle=nonce_oracle,
gas_oracle=self.fee_oracle,
)
tx = c.template(signer_address, None, use_nonce=True)
tx = c.set_code(tx, code)
o = c.finalize(tx, self.tx_format)
token_address_tx = None
r = None
if self.rpc != None:
if self.rpc is not None:
r = self.rpc.do(o[1])
token_address_tx = r
o = self.rpc.wait(r)
o = Tx.src_normalize(o)
self.token_address = o['contract_address']
elif self.signer != None:
self.token_address = o["contract_address"]
elif self.signer is not None:
r = o[1]
token_address_tx = r
if r == None:
if r is None:
r = code
writer.write('token', r.encode('utf-8'))
writer.write('token_address', self.token_address.encode('utf-8'))
self.add_outputs('token', r)
writer.write("token", r.encode("utf-8"))
writer.write("token_address", self.token_address.encode("utf-8"))
self.add_outputs("token", r)
if self.token_details['supply'] > 0:
c = GiftableToken(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=self.fee_oracle)
o = c.mint_to(self.token_address, self.resources['token']['key_account'], self.resources['token']['key_account'], self.token_details['supply'])
if int(self.token_details["supply"]) > 0:
c = GiftableToken(
self.chain_spec,
signer=self.signer,
nonce_oracle=nonce_oracle,
gas_oracle=self.fee_oracle,
)
o = c.mint_to(
self.token_address,
self.resources["token"]["key_account"],
self.resources["token"]["key_account"],
self.token_details["supply"],
)
r = None
if self.rpc != None:
if self.rpc is not None:
r = self.rpc.do(o[1])
self.rpc.wait(r)
writer.write('token_supply', r.encode('utf-8'))
elif self.signer != None:
writer.write("token_supply", r.encode("utf-8"))
elif self.signer is not None:
r = o[1]
writer.write('token_supply', json.dumps(r).encode('utf-8'))
writer.write(
"token_supply", json.dumps(r, separators=(",", ":")).encode("utf-8")
)
else:
r = o
writer.write('token_supply', r.encode('utf-8'))
writer.write("token_supply", r.encode("utf-8"))
return token_address_tx
def process_token_index(self, writer=None):
"""Register deployed token with token index.
:param writer: Writer interface receiving the output of the processor step
:type writer: cic.output.OutputWriter
:type writer: cic.writers.OutputWriter
"""
if writer == None:
if writer is None:
writer = self.outputs_writer
signer_address = add_0x(to_checksum_address(self.resources['token_index']['key_account']))
contract_address = add_0x(to_checksum_address(self.resources['token_index']['reference']))
signer_address = add_0x(
to_checksum_address(self.resources["token_index"]["key_account"])
)
contract_address = add_0x(
to_checksum_address(self.resources["token_index"]["reference"])
)
gas_oracle = OverrideGasOracle(limit=TokenUniqueSymbolIndex.gas(), conn=self.rpc)
gas_oracle = OverrideGasOracle(
limit=TokenUniqueSymbolIndex.gas(), conn=self.rpc
)
nonce_oracle = None
if self.rpc != None:
if self.rpc is not None:
nonce_oracle = RPCNonceOracle(add_0x(signer_address), conn=self.rpc)
c = TokenUniqueSymbolIndex(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
o = c.register(contract_address, signer_address, self.token_address, tx_format=self.tx_format)
c = TokenUniqueSymbolIndex(
self.chain_spec,
signer=self.signer,
nonce_oracle=nonce_oracle,
gas_oracle=gas_oracle,
)
o = c.register(
contract_address,
signer_address,
self.token_address,
tx_format=self.tx_format,
)
r = None
if self.rpc != None:
if self.rpc is not None:
r = self.rpc.do(o[1])
self.rpc.wait(r)
elif self.signer != None:
elif self.signer is not None:
r = o[1]
else:
r = o
writer.write('token_index', r.encode('utf-8'))
self.add_outputs('token_index', r)
writer.write("token_index", r.encode("utf-8"))
self.add_outputs("token_index", r)
return r
def process_address_declarator(self, writer=None):
"""Register token proofs with address declarator.
:param writer: Writer interface receiving the output of the processor step
:type writer: cic.output.OutputWriter
:type writer: cic.writers.OutputWriter
"""
if writer == None:
if writer is None:
writer = self.outputs_writer
signer_address = add_0x(to_checksum_address(self.resources['address_declarator']['key_account']))
contract_address = add_0x(to_checksum_address(self.resources['address_declarator']['reference']))
signer_address = add_0x(
to_checksum_address(self.resources["address_declarator"]["key_account"])
)
contract_address = add_0x(
to_checksum_address(self.resources["address_declarator"]["reference"])
)
gas_oracle = OverrideGasOracle(limit=AddressDeclarator.gas(), conn=self.rpc)
nonce_oracle = None
if self.rpc != None:
if self.rpc is not None:
nonce_oracle = RPCNonceOracle(signer_address, conn=self.rpc)
c = Declarator(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
c = Declarator(
self.chain_spec,
signer=self.signer,
nonce_oracle=nonce_oracle,
gas_oracle=gas_oracle,
)
results = []
#(main_proof, all_proofs) = self.proof.get()
# (main_proof, all_proofs) = self.proof.get()
#for proof in all_proofs:
#logg.debug('proof {} '.format(proof))
# for proof in all_proofs:
# logg.debug('proof {} '.format(proof))
(k, v) = self.proof.root()
fk = 'address_declarator_' + k
o = c.add_declaration(contract_address, signer_address, self.token_address, k, tx_format=self.tx_format)
fk = "address_declarator_" + k
o = c.add_declaration(
contract_address,
signer_address,
self.token_address,
k,
tx_format=self.tx_format,
)
r = None
if self.rpc != None:
if self.rpc is not None:
r = self.rpc.do(o[1])
self.rpc.wait(r)
elif self.signer != None:
elif self.signer is not None:
r = o[1]
else:
r = o
self.add_outputs(fk, r)
results.append(r)
v = r.encode('utf-8')
if writer != None:
v = r.encode("utf-8")
if writer is not None:
writer.write(fk, v)
return results
def prepare_extension(self):
"""Sets token address for extension if defined in settings.
"""
"""Sets token address for extension if defined in settings."""
super(CICEth, self).prepare_extension()
if self.token_address != None:
if self.token_address is not None:
self.token_address = add_0x(to_checksum_address(self.token_address))
@@ -308,4 +356,11 @@ def new(chain_spec, resources, proof, signer_hint=None, rpc=None, outputs_writer
See CICEth constructor for details.
"""
return CICEth(chain_spec, resources, proof, signer=signer_hint, rpc=rpc, outputs_writer=outputs_writer)
return CICEth(
chain_spec,
resources,
proof,
signer=signer_hint,
rpc=rpc,
outputs_writer=outputs_writer,
)

View File

@@ -20,12 +20,13 @@ class EthKeystoreDirectory(DictKeystore, KeystoreDirectory):
TODO: Move to funga
"""
pass
def parse_adapter(config, signer_hint):
"""Determine and instantiate signer and rpc from configuration.
If either could not be determined, None is returned.
:param config: Configuration object implementing the get() method
@@ -36,12 +37,12 @@ def parse_adapter(config, signer_hint):
:return: RPC interface, signer interface
"""
keystore = None
if signer_hint == None:
logg.info('signer hint missing')
if signer_hint is None:
logg.info("signer hint missing")
return None
st = os.stat(signer_hint)
if stat.S_ISDIR(st.st_mode):
logg.debug('signer hint is directory')
logg.debug("signer hint is directory")
keystore = EthKeystoreDirectory()
keystore.process_dir(signer_hint)

View File

@@ -10,6 +10,7 @@ def extension_start(network, *args, **kwargs):
:type network: cic.network.Network
"""
CICRegistry.address = kwargs['registry_address']
key_account_address = kwargs['key_account_address'] or ''
RPCConnection.register_location(kwargs['rpc_provider'], kwargs['chain_spec'])
conn = RPCConnection.connect(kwargs['chain_spec'])
@@ -17,10 +18,13 @@ def extension_start(network, *args, **kwargs):
registry = CICRegistry(kwargs['chain_spec'], conn)
address_declarator = registry.by_name('AddressDeclarator')
network.resource_set('eth', 'address_declarator', address_declarator)
network.resource_set('eth', 'address_declarator', address_declarator, key_account=key_account_address)
token_index = registry.by_name('TokenRegistry')
network.resource_set('eth', 'token_index', token_index)
network.resource_set('eth', 'token_index', token_index, key_account=key_account_address)
network.resource_set('eth', 'token', None, key_account=key_account_address)
network.set('eth', kwargs['chain_spec'])
network.save()

View File

@@ -5,7 +5,8 @@ import logging
from hexathon import valid as valid_hex
# local imports
from cic.output import StdoutWriter
from cic.writers import StdoutWriter
from cic.contract.components.token import Token
logg = logging.getLogger(__name__)
@@ -24,9 +25,18 @@ class Extension:
:param rpc: RPC adapter capable of submitting and querying the chain network node
:type rpc: chainlib.connection.RPCConnection
:param writer: Writer interface receiving the output of the processor
:type writer: cic.output.OutputWriter
:type writer: cic.writers.OutputWriter
"""
def __init__(self, chain_spec, resources, proof, signer=None, rpc=None, outputs_writer=StdoutWriter()):
def __init__(
self,
chain_spec,
resources,
proof,
signer=None,
rpc=None,
outputs_writer=StdoutWriter(),
):
self.resources = resources
self.proof = proof
self.chain_spec = chain_spec
@@ -38,9 +48,8 @@ class Extension:
self.outputs = []
self.outputs_writer = outputs_writer
# TODO: apply / prepare token can be factored out
def apply_token(self, token):
def apply_token(self, token: Token):
"""Initialize extension with token data from settings.
:param token: Token object
@@ -48,10 +57,27 @@ class Extension:
:rtype: dict
:returns: Token data state of extension after load
"""
return self.prepare_token(token.name, token.symbol, token.precision, token.code, token.supply)
return self.prepare_token(
token.name,
token.symbol,
token.precision,
token.code,
token.supply,
token.extra_args,
token.extra_args_types,
)
def prepare_token(self, name, symbol, precision, code, supply, extra=[], extra_types=[], positions=None):
def prepare_token(
self,
name,
symbol,
precision,
code,
supply,
extra=None,
extra_types=None,
positions=None,
):
"""Initialize extension token data.
:param name: Token name
@@ -65,7 +91,7 @@ class Extension:
:param supply: Token supply (in smallest precision units)
:type supply: int
:param extra: Extra parameters to pass to token application constructor
:type extra: list
:type extra: list
:param extra_types: Type specifications for extra parameters
:type extra_types: list
:param positions: Sequence of parameter indices to pass to application constructor
@@ -74,22 +100,20 @@ class Extension:
:returns: Token data state of extension after load
"""
self.token_details = {
'name': name,
'symbol': symbol,
'precision': precision,
'code': code,
'supply': supply,
'extra': extra,
'extra_types': extra_types,
'positions': positions,
}
"name": name,
"symbol": symbol,
"precision": precision,
"code": code,
"supply": supply,
"extra": extra or [],
"extra_types": extra_types or [],
"positions": positions,
}
logg.debug(f"token details: {self.token_details}")
return self.token_details
def prepare_extension(self):
"""Prepare extension for publishing (noop)
"""
pass
"""Prepare extension for publishing (noop)"""
def parse_code_as_file(self, v):
@@ -101,17 +125,14 @@ class Extension:
:type v: str
"""
try:
f = open(v, 'r')
f = open(v, "r", encoding="utf-8")
r = f.read()
f.close()
self.parse_code_as_hex(r)
except FileNotFoundError:
logg.debug('could not parse code as file: {}'.format(e))
pass
except IsADirectoryError:
logg.debug('could not parse code as file: {}'.format(e))
pass
except FileNotFoundError as e:
logg.debug(f"could not parse code as file: {e}")
except IsADirectoryError as e:
logg.debug(f"could not parse code as file: {e}")
def parse_code_as_hex(self, v):
"""Helper method to load application bytecode from hex data into extension token data state.
@@ -121,12 +142,10 @@ class Extension:
:param v: Bytecode as hex
:type v: str
"""
try:
try:
self.token_code = valid_hex(v)
except ValueError as e:
logg.debug('could not parse code as hex: {}'.format(e))
pass
logg.debug(f"could not parse code as hex: {e}")
def load_code(self, hint=None):
"""Attempt to load token application bytecode using token settings.
@@ -136,57 +155,57 @@ class Extension:
:rtype: str (hex)
:return: Bytecode loaded into extension token data state
"""
code = self.token_details['code']
if hint == 'hex':
code = self.token_details["code"]
if hint == "hex":
self.token_code = valid_hex(code)
for m in [
self.parse_code_as_hex,
self.parse_code_as_file,
]:
self.parse_code_as_hex,
self.parse_code_as_file,
]:
m(code)
if self.token_code != None:
if self.token_code is not None:
break
if self.token_code == None:
raise RuntimeError('could not successfully parse token code')
if self.token_code is None:
raise RuntimeError("could not successfully parse token code")
return self.token_code
def process(self, writer=None):
"""Adapter used by Processor to process the extensions implementing the Extension base class.
Requires either token address or a valid token code reference to have been included in settings. If token address is not set, the token application code will be deployed.
Requires either token address or a valid token code reference to have been included in settings.
If token address is not set, the token application code will be deployed.
:param writer: Writer to use for publishing.
:type writer: cic.output.OutputWriter
:type writer: cic.writers.OutputWriter
:rtype: tuple
:return: Token address, token symbol
"""
if writer == None:
if writer is None:
writer = self.outputs_writer
tasks = []
self.token_address = self.resources['token']['reference']
tasks = []
self.token_address = self.resources["token"]["reference"]
# TODO: get token details when token address is not none
if self.token_address == None:
if self.token_details['code'] == None:
raise RuntimeError('neither token address nor token code has been set')
if self.token_address is None:
if self.token_details["code"] is None:
raise RuntimeError("neither token address nor token code has been set")
self.load_code()
tasks.append('token')
tasks.append("token")
for k in self.resources.keys():
if k == 'token':
if k == "token":
continue
if self.resources[k]['reference'] != None:
if self.resources[k]["reference"] is not None:
tasks.append(k)
self.prepare_extension()
for task in tasks:
logg.debug('extension adapter process {}'.format(task))
r = getattr(self, 'process_' + task)(writer=writer)
logg.debug(f"extension adapter process {task}")
_r = getattr(self, "process_" + task)(writer=writer)
return (self.token_address, self.token_details.get('symbol'))
return (self.token_address, self.token_details.get("symbol"))

View File

@@ -1,10 +0,0 @@
class Hasher:
    """Compute SHA-256 digests over byte sequences."""

    def __digest(self, data):
        # Single hashing backend; every public hash method delegates here
        # so the algorithm can be swapped in one place.
        return hashlib.sha256(data).digest()

    def hash(self, v):
        """Return the SHA-256 digest (bytes) of *v*."""
        return self.__digest(v)

View File

@@ -23,9 +23,9 @@ class KeystoreDirectory(Keystore):
except IsADirectoryError:
pass
except KeyfileError as e:
logg.warning('file {} could not be parsed as keyfile: {}'.format(fp, e))
logg.warning(f'file {fp} could not be parsed as keyfile: {e}')
except DecryptError as e:
if password_retriever == None:
if password_retriever is None:
raise e
password = password_retriever()
self.import_keystore_file(fp, password=password)

View File

@@ -1,141 +0,0 @@
# standard imports
import os
import json
import logging
import base64
# external imports
from cic_types import MetadataPointer
from cic_types.processor import generate_metadata_pointer
from cic_types.ext.metadata import MetadataRequestsHandler
from hexathon import strip_0x
# local imports
from .base import (
Data,
data_dir,
)
from cic.output import OutputWriter
logg = logging.getLogger(__name__)
class Meta(Data):
    """Serialize and publish metadata for token.

    The token metadata is any mutable data that is not part of the initial
    token proof, but published simultaneously as the token nonetheless.

    :param path: Path to settings directory
    :type path: str
    :param writer: Writer interface receiving the output of the processor
    :type writer: cic.output.OutputWriter
    """

    def __init__(self, path='.', writer=None):
        super(Meta, self).__init__()
        self.name = None
        self.contact = {}
        self.path = path
        self.writer = writer
        self.meta_path = os.path.join(self.path, 'meta.json')

    def load(self):
        """Load metadata from settings (meta.json in the session directory)."""
        super(Meta, self).load()
        with open(self.meta_path, 'r', encoding='utf-8') as f:
            o = json.load(f)
        self.name = o['name']
        self.contact = o['contact']
        self.inited = True

    def start(self):
        """Initialize metadata settings from the bundled template for the
        current schema version."""
        super(Meta, self).start()
        meta_template_file_path = os.path.join(
            data_dir, f'meta_template_v{self.version()}.json'
        )
        with open(meta_template_file_path, encoding='utf-8') as f:
            o = json.load(f)
        with open(self.meta_path, 'w', encoding='utf-8') as f:
            json.dump(o, f, sort_keys=True, indent="\t")

    def reference(self, token_address):
        """Calculate the mutable reference for the token metadata.

        :param token_address: Token address (hex)
        :type token_address: str
        :rtype: str
        :return: Metadata pointer derived from the token address
        """
        token_address_bytes = bytes.fromhex(strip_0x(token_address))
        return generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_META)

    def asdict(self):
        """Output metadata state to dict."""
        return {
            'name': self.name,
            'contact': self.contact,
        }

    def process(self, token_address=None, token_symbol=None, writer=None):
        """Serialize and publish metadata.

        See cic.processor.Processor.process
        """
        if writer is None:
            writer = self.writer

        v = json.dumps(self.asdict())

        # Publish the same payload under pointers derived from both the
        # token address and the token symbol.
        token_address_bytes = bytes.fromhex(strip_0x(token_address))
        k = generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_META)
        writer.write(k, v.encode('utf-8'))

        token_symbol_bytes = token_symbol.encode('utf-8')
        k = generate_metadata_pointer(token_symbol_bytes, MetadataPointer.TOKEN_META_SYMBOL)
        writer.write(k, v.encode('utf-8'))

        return (k, v)

    def __str__(self):
        s = f"contact.name = {self.name}\n"
        for k in self.contact.keys():
            if self.contact[k] == '':
                continue
            s += f"contact.{k.lower()} = {self.contact[k]}\n"
        return s
class MetadataWriter(OutputWriter):
    """Custom writer for publishing data under immutable content-addressed
    pointers in the cic-meta storage backend.

    Data that is not utf-8 will be converted to base64 before publishing.

    Implements cic.output.OutputWriter
    """

    def write(self, k, v):
        rq = MetadataRequestsHandler(MetadataPointer.NONE, bytes.fromhex(k))
        try:
            v = v.decode('utf-8')
            v = json.loads(v)
            logg.debug(f'metadatawriter bindecode {k} {v}')
        except UnicodeDecodeError:
            # Binary payload: wrap as base64 so it survives JSON serialization.
            v = base64.b64encode(v).decode('utf-8')
            v = json.loads(json.dumps(v))
            logg.debug(f'metadatawriter b64encode {k} {v}')
        r = rq.create(v)
        logg.info(f'metadata submitted at {k}')
        return r

View File

@@ -1,181 +0,0 @@
# standard imports
import os
import json
import logging
import tempfile
import cbor2
# external imports
from hexathon import strip_0x
from cic_types import MetadataPointer
from cic_types.processor import generate_metadata_pointer
from cic_types.ext.metadata import MetadataRequestsHandler
# local imports
from .base import *
from cic.output import OutputWriter
logg = logging.getLogger(__name__)
class Proof(Data):
    """Proof handles the immutable token proof data mapped to the initial token deployment.

    It processes inputs from the proof.json file in the session directory.

    Optionally, attachment objects can be added to the proof. If added, the
    resulting proof digest will consist of the attachment digests added to the
    root digest. These are deterministically ordered, regardless of which order
    attachments were given to the constructor.

    :param path: Path to settings directory
    :type path: str
    :param attachments: List of attachment objects to include in the proof
    :type attachments: cic.attachment.Attachment
    :param writer: Writer interface receiving the output of the processor
    :type writer: cic.output.OutputWriter
    """

    def __init__(self, path='.', attachments=None, writer=None):
        super(Proof, self).__init__()
        self.proofs = []
        self.namespace = 'ge'
        self.description = None
        self.issuer = None
        self.path = path
        self.writer = writer
        self.extra_attachments = attachments
        self.attachments = {}
        self.proof_path = os.path.join(self.path, 'proof.json')
        # Scratch file holding the serialized proof for inspection/debugging.
        self.temp_proof_path = tempfile.mkstemp()[1]

    def load(self):
        """Load proof data from settings (proof.json in the session directory)."""
        super(Proof, self).load()
        with open(self.proof_path, 'r', encoding='utf-8') as f:
            o = json.load(f)
        self.set_version(o['version'])
        self.description = o['description']
        self.namespace = o['namespace']
        self.issuer = o['issuer']
        self.proofs = o['proofs']

        if self.extra_attachments is not None:
            a = self.extra_attachments.asdict()
            for k in a.keys():
                self.attachments[k] = a[k]

        hshs = self.__get_ordered_hashes()
        self.proofs = list(map(strip_0x, hshs))
        self.inited = True

    def start(self):
        """Initialize proof settings from the bundled template for the
        current schema version."""
        super(Proof, self).start()
        proof_template_file_path = os.path.join(
            data_dir, f'proof_template_v{self.version()}.json'
        )
        with open(proof_template_file_path, encoding='utf-8') as f:
            o = json.load(f)
        with open(self.proof_path, 'w', encoding='utf-8') as f:
            json.dump(o, f, sort_keys=True, indent="\t")

    def asdict(self):
        """Output proof state to dict."""
        return {
            'version': self.version(),
            'namespace': self.namespace,
            'description': self.description,
            'issuer': self.issuer,
            'proofs': self.proofs,
        }

    def __get_ordered_hashes(self):
        # sorted() gives a deterministic ordering regardless of the order in
        # which attachments were registered.
        return sorted(self.attachments.keys())

    def root(self):
        """Calculate the root digest from the serialized proof object.

        :rtype: tuple
        :return: Root digest (hex), serialized proof (bytes)
        """
        v = self.asdict()
        # Serialized as JSON (not cbor) so the published proof stays
        # human-readable.
        b = json.dumps(v)
        with open(self.temp_proof_path, 'w', encoding='utf-8') as f:
            f.write(b)
        b = b.encode('utf-8')
        k = self.hash(b)
        return (k.hex(), b)

    def process(self, token_address=None, token_symbol=None, writer=None):
        """Serialize and publish proof.

        See cic.processor.Processor.process
        """
        if writer is None:
            writer = self.writer

        (k, v) = self.root()
        writer.write(k, v)
        root_key = k

        # Publish the proof under pointers derived from both symbol and address.
        token_symbol_bytes = token_symbol.encode('utf-8')
        k = generate_metadata_pointer(token_symbol_bytes, MetadataPointer.TOKEN_PROOF_SYMBOL)
        writer.write(k, v)

        token_address_bytes = bytes.fromhex(strip_0x(token_address))
        k = generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_PROOF)
        writer.write(k, v)

        # Persist the (possibly attachment-augmented) proof state back to disk.
        o = self.asdict()
        with open(self.proof_path, 'w', encoding='utf-8') as f:
            json.dump(o, f, sort_keys=True, indent="\t")

        return root_key

    def __str__(self):
        return f"description = {self.description}\n"

View File

@@ -1,16 +1,18 @@
# standard imports
import os
import logging
import argparse
import sys
import importlib
# external imports
import chainlib.cli
# local imports
import cic.cmd.init as cmd_init
import cic.cmd.show as cmd_show
import cic.cmd.ext as cmd_ext
import cic.cmd.export as cmd_export
import cic.cmd.wizard as cmd_wizard
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
@@ -21,41 +23,53 @@ base_config_dir = os.path.join(data_dir, 'config')
schema_dir = os.path.join(script_dir, '..', 'schema')
arg_flags = chainlib.cli.argflag_std_read | chainlib.cli.Flag.SEQ
argparser = chainlib.cli.ArgumentParser(env=os.environ, arg_flags=arg_flags, description='CIC cli tool for generating and publishing tokens')
argparser = chainlib.cli.ArgumentParser(
env=os.environ,
arg_flags=arg_flags,
description='CIC cli tool for generating and publishing tokens'
)
sub = argparser.add_subparsers()
sub.dest = 'command'
sub_init = sub.add_parser('init', help='initialize new cic data directory')
cmd_init.process_args(sub_init)
sub_show = sub.add_parser('show', help='display summary of current state of cic data directory')
cmd_show.process_args(sub_show)
sub_export = sub.add_parser('export', help='export cic data directory state to a specified target')
cmd_export.process_args(sub_export)
sub_ext = sub.add_parser('ext', help='extension helpers')
cmd_ext.process_args(sub_ext)
sub_wizard = sub.add_parser('wizard', help='An interactive wizard for creating and publishing contracts')
cmd_wizard.process_args(sub_wizard)
args = argparser.parse_args(sys.argv[1:])
if args.command == None:
if args.command is None:
logg.critical('Subcommand missing')
sys.stderr.write("\033[;91m" + 'subcommand missing' + "\033[;39m\n")
argparser.print_help(sys.stderr)
sys.exit(1)
modname = 'cic.cmd.{}'.format(args.command)
logg.debug('using module {}'.format(modname))
modname = f'cic.cmd.{args.command}'
logg.debug(f'using module {modname}')
cmd_mod = importlib.import_module(modname)
extra_args = {
'p': 'RPC_PROVIDER',
}
}
config = chainlib.cli.Config.from_args(args, arg_flags=arg_flags, base_config_dir=base_config_dir, extra_args=extra_args)
def main():
try:
cmd_mod.execute(config, args)
except Exception as e:
logg.exception(e) #'{}'.format(e))
logg.exception(e)
sys.stderr.write("\033[;91m" + str(e) + "\033[;39m\n")
sys.exit(1)

View File

@@ -1,88 +0,0 @@
# standard imports
import os
import json
# local imports
from .base import (
Data,
data_dir,
)
class Token(Data):
    """Encapsulates the token data used by the extension to deploy and/or
    register token and token related applications on chain.

    Token details (name, symbol etc) will be used to initialize the token
    settings when start is called. If load is called instead, any token detail
    parameters passed to the constructor will be overwritten by data stored in
    the settings.

    :param path: Settings directory path
    :type path: str
    :param name: Token name
    :type name: str
    :param symbol: Token symbol
    :type symbol: str
    :param precision: Token value precision (number of decimals)
    :type precision: int
    :param supply: Token supply (in smallest precision units)
    :type supply: int
    :param code: Bytecode for token chain application
    :type code: str (hex)
    """

    def __init__(self, path='.', name=None, symbol=None, precision=1, supply=0, code=None):
        super(Token, self).__init__()
        self.name = name
        self.symbol = symbol
        self.supply = supply
        self.precision = precision
        self.code = code
        self.extra_args = None
        self.path = path
        self.token_path = os.path.join(self.path, 'token.json')

    def load(self):
        """Load token data from settings (token.json in the session directory)."""
        super(Token, self).load()
        with open(self.token_path, 'r', encoding='utf-8') as f:
            o = json.load(f)
        self.name = o['name']
        self.symbol = o['symbol']
        self.precision = o['precision']
        self.code = o['code']
        self.supply = o['supply']
        self.extra_args = o['extra']
        self.inited = True

    def start(self):
        """Initialize token settings from arguments passed to the constructor
        and/or template."""
        # BUGFIX: this previously called super().load(); the sibling classes
        # (Meta, Proof) both call super().start() from their start() methods.
        super(Token, self).start()
        token_template_file_path = os.path.join(
            data_dir, f'token_template_v{self.version()}.json'
        )
        with open(token_template_file_path, encoding='utf-8') as f:
            o = json.load(f)
        o['name'] = self.name
        o['symbol'] = self.symbol
        o['precision'] = self.precision
        o['code'] = self.code
        o['supply'] = self.supply
        with open(self.token_path, 'w', encoding='utf-8') as f:
            json.dump(o, f, sort_keys=True, indent="\t")

    def __str__(self):
        return (
            f"name = {self.name}\n"
            f"symbol = {self.symbol}\n"
            f"precision = {self.precision}\n"
        )

24
cic/utils.py Normal file
View File

@@ -0,0 +1,24 @@
def object_to_str(obj, keys):
    """Return a string representation of an object.

    Each entry in *keys* names an attribute of *obj*; a trailing "()" marks it
    as a zero-argument method to call. List values are expanded one line per
    index, dict values one line per key; everything else is rendered on a
    single "key = value" line.

    :param obj: Object to render
    :param keys: Attribute (or "method()") names to include, in output order
    :type keys: list of str
    :rtype: str
    :return: One "key = value" line per rendered entry
    """
    s = ""
    for key in keys:
        # Resolve via getattr instead of eval(): eval on attribute strings is
        # a code-injection hazard and needless overhead.
        attr_name = key.replace("()", "")
        value = getattr(obj, attr_name)
        if key.endswith("()"):
            value = value()
        key = attr_name
        if isinstance(value, str):
            s += f"{key} = {value}\n"
        elif isinstance(value, list):
            for idx, vv in enumerate(value):
                if not vv:
                    s += f"{key}[{idx}] = \n"
                    continue
                s += f"{key}[{idx}] = {vv}\n"
        elif isinstance(value, dict):
            for vv_key, vv_value in value.items():
                if not vv_value:
                    s += f"{key}.{vv_key} = \n"
                    continue
                s += f"{key}.{vv_key} = {vv_value}\n"
        else:
            s += f"{key} = {str(value)}\n"
    return s

View File

@@ -51,7 +51,7 @@ class HTTPWriter(OutputWriter):
path = self.path
if k != None:
path = os.path.join(path, k)
logg.debug('http writer post {}'.format(path))
logg.debug(f'http writer post {path} \n key: {k}, value: {v}')
rq = urllib.request.Request(path, method='POST', data=v)
r = urllib.request.urlopen(rq)
logg.info('http writer submitted at {}'.format(r.read()))
@@ -64,14 +64,14 @@ class KeyedWriter(OutputWriter):
self.writer_immutable = writer_immutable
def write(self, k, v):
logg.debug('writing keywriter {} {}'.format(k, v))
if isinstance(v, str):
v = v.encode('utf-8')
def write(self, key, value):
logg.debug(f'writing keywriter key: {key} value: {value}')
if isinstance(value, str):
value = value.encode('utf-8')
if self.writer_keyed != None:
self.writer_keyed.write(k, v)
self.writer_keyed.write(key, value)
if self.writer_immutable != None:
self.writer_immutable.write(None, v)
self.writer_immutable.write(None, value)
class KeyedWriterFactory: