Add customizable writers, configs, chain spec in network

nolash 2021-10-11 17:39:01 +02:00
parent 23567905a1
commit 8451285d0d
Signed by: lash
GPG Key ID: 21D2E7BB88C2A746
19 changed files with 159 additions and 61 deletions

View File

@@ -10,10 +10,11 @@ logg = logging.getLogger(__name__)
class Attachment(Data):
def __init__(self, path='.'):
def __init__(self, path='.', writer=None):
super(Attachment, self).__init__()
self.contents = {}
self.path = path
self.writer = writer
self.attachment_path = os.path.join(self.path, 'attachments')
@@ -43,6 +44,17 @@ class Attachment(Data):
return self.contents
def process(self, token_address=None, writer=None):
if writer == None:
writer = self.writer
for k in self.contents.keys():
f = open(self.contents[k], 'rb')
v = f.read()
f.close()
logg.debug('writing {}'.format(k))
writer.write(k, v)
def __str__(self):
s = ''
for i in range(len(self.contents)):

View File

@@ -22,14 +22,32 @@ def validate_args(args):
pass
def init_writers_from_config(config):
w = {
'meta': None,
'attachment': None,
'proof': None,
}
for v in w.keys():
k = 'CIC_CORE_{}_WRITER'.format(v.upper())
(d, c) = config.get(k).rsplit('.', maxsplit=1)
m = importlib.import_module(d)
o = getattr(m, c)
w[v] = o
return w
def execute(config, eargs):
modname = 'cic.ext.{}'.format(eargs.target)
cmd_mod = importlib.import_module(modname)
writers = init_writers_from_config(config)
ct = Token(path=eargs.directory)
cm = Meta(path=eargs.directory)
ca = Attachment(path=eargs.directory)
cp = Proof(path=eargs.directory, attachments=ca)
cm = Meta(path=eargs.directory, writer=writers['meta'])
ca = Attachment(path=eargs.directory, writer=writers['attachment'])
cp = Proof(path=eargs.directory, attachments=ca, writer=writers['proof'])
cn = Network(path=eargs.directory)
ct.load()
@@ -38,6 +56,7 @@ def execute(config, eargs):
ca.load()
cn.load()
ref = cn.reference(eargs.target)
ref = cn.resource(eargs.target)
chain_spec = cn.chain_spec(eargs.target)
logg.debug('found reference {} for target {}'.format(ref, eargs.target))
getattr(cmd_mod, 'new')(ref, cp, signer_hint=eargs.signer)
getattr(cmd_mod, 'new')(chain_spec, ref, cp, signer_hint=eargs.signer)
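A minimal standalone sketch of the dotted-path lookup that init_writers_from_config performs above, assuming the cic package is importable; the one-entry dict is a hypothetical stand-in for the confini-backed config object, and its key mirrors the new [cic_core] section.

# Sketch of resolving a writer class from a config value like 'cic.output.KVWriter'.
import importlib

config = {'CIC_CORE_META_WRITER': 'cic.output.KVWriter'}  # hypothetical stand-in for the confini config

def resolve_writer(dotted_path):
    # 'cic.output.KVWriter' -> module 'cic.output', attribute 'KVWriter'
    module_path, class_name = dotted_path.rsplit('.', maxsplit=1)
    module = importlib.import_module(module_path)
    return getattr(module, class_name)

writer_class = resolve_writer(config['CIC_CORE_META_WRITER'])  # -> the cic.output.KVWriter class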

View File

@@ -13,7 +13,7 @@ logg = logging.getLogger(__name__)
def process_args(argparser):
argparser.add_argument('--target', action='append', type=str, help='initialize network specification file with target')
argparser.add_argument('--target', action='append', type=str, default=[], help='initialize network specification file with target')
argparser.add_argument('--name', type=str, help='token name')
argparser.add_argument('--symbol', type=str, help='token symbol')
argparser.add_argument('--precision', type=str, help='token unit precision')

cic/config.py Normal file
View File

@@ -0,0 +1,4 @@
[cic_core]
meta_writer = cic.output.KVWriter
attachment_writer = cic.output.KVWriter
proof_writer = cic.output.KVWriter

View File

@@ -1,4 +1,12 @@
{
"chain_spec": {
"arch": null,
"fork": null,
"network_id": null,
"common_name": null,
"extra": {}
},
"contents": {
"token": {
"reference": null,
"key_account": null
@@ -11,4 +19,5 @@
"reference": null,
"key_account": null
}
}
}
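The chain_spec block added to the network template above is what the new Network.chain_spec() accessor further down hands to chainlib's ChainSpec.from_dict. A hedged sketch with hypothetical field values; in the template they start out as null.

# Hypothetical example values; the shipped template leaves all of them null.
from chainlib.chain import ChainSpec

chain_spec_dict = {
    'arch': 'evm',
    'fork': 'byzantium',
    'network_id': 8996,
    'common_name': 'bloxberg',
    'extra': {},
}
spec = ChainSpec.from_dict(chain_spec_dict)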

View File

@@ -134,7 +134,6 @@ class CICEth:
if writer == None:
writer = self.outputs_writer
logg.debug('ZZZZZZZZ token details {}'.format(self.token_details))
(args, args_types, positions) = self.__order_args()
enc = ABIContractEncoder()
@@ -212,6 +211,7 @@ class CICEth:
results = []
for proof in self.proof.get():
k = 'address_declarator_' + proof
o = c.add_declaration(contract_address, signer_address, self.token_address, proof, tx_format=self.tx_format)
r = None
if self.rpc != None:
@@ -220,8 +220,10 @@ class CICEth:
r = o[1]
else:
r = o
self.add_outputs('address_declarator', r)
self.add_outputs(k, r)
results.append(r)
if writer != None:
writer.write(k, r.encode('utf-8'))
return results
@@ -249,5 +251,5 @@ class CICEth:
return self.token_address
def new(resources, proof, signer_hint=None):
return CICEth(resources, proof, signer=None)
def new(chain_spec, resources, proof, signer_hint=None):
return CICEth(chain_spec, resources, proof, signer=None)

View File

@@ -16,11 +16,12 @@ from .base import (
class Meta(Data):
def __init__(self, path='.'):
def __init__(self, path='.', writer=None):
super(Meta, self).__init__()
self.name = None
self.contact = {}
self.path = path
self.writer = writer
self.meta_path = os.path.join(self.path, 'meta.json')
@@ -64,9 +65,12 @@ class Meta(Data):
def process(self, token_address=None, writer=None):
if writer == None:
writer = self.writer
k = self.reference(token_address)
v = json.dumps(self.asdict())
writer.write(k, v)
writer.write(k, v.encode('utf-8'))
return (k, v)

View File

@@ -2,6 +2,9 @@
import os
import json
# external imports
from chainlib.chain import ChainSpec
# local imports
from .base import (
Data,
@@ -45,22 +48,28 @@ class Network(Data):
for v in self.targets:
o['resources'][v] = o_part
json.dump(o, f)
f.close()
def reference(self, k):
v = self.references.get(k)
def resource(self, k):
v = self.resources.get(k)
if v == None:
raise AttributeError('no defined reference for {}'.format(k))
return v
def chain_spec(self, k):
v = self.resource(k)
return ChainSpec.from_dict(v['chain_spec'])
def __str__(self):
s = ''
for k in self.resources.keys():
for kk in self.resources[k].keys():
v = self.resources[k][kk]
for kk in self.resources[k]['contents'].keys():
v = self.resources[k]['contents'][kk]
if v == None:
v = ''
s += '{}.{} = {}\n'.format(k, kk, v)
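A short usage sketch of the renamed accessors, mirroring the calls in the export command above; the target name 'eth' and the directory are hypothetical, and the import path assumes the class lives in cic.network.

from cic.network import Network  # assumed module path

cn = Network(path='./mytoken')
cn.load()
ref = cn.resource('eth')           # per-target resource block from network.json
chain_spec = cn.chain_spec('eth')  # chainlib ChainSpec built from its chain_spec entry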

View File

@@ -1,8 +1,21 @@
# standard imports
import os
import sys
class KVWriter:
class OutputWriter:
def write(self, k, v):
raise NotImplementedError()
class StdoutWriter:
def write(self, k, v):
sys.stdout.write('{}\t{}\n'.format(k, v))
class KVWriter(OutputWriter):
def __init__(self, path):
os.stat(path)
@@ -11,6 +24,6 @@ class KVWriter:
def write(self, k, v):
fp = os.path.join(self.path, str(k))
f = open(fp, 'w')
f = open(fp, 'wb')
f.write(v)
f.close()
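A hedged usage sketch of the writer hierarchy introduced here: OutputWriter is the bare interface, StdoutWriter prints key/value pairs, and KVWriter persists each value as a file named after its key (now opened in binary mode, so values must be bytes). The temporary directory is illustrative.

import tempfile
from cic.output import KVWriter, StdoutWriter

d = tempfile.mkdtemp()
w = KVWriter(d)
w.write('foo', b'bar')   # written to <d>/foo in binary mode

s = StdoutWriter()
s.write('foo', b'bar')   # prints the key and the value separated by a tab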

View File

@@ -6,10 +6,13 @@ logg = logging.getLogger(__name__)
class Processor:
def __init__(self, metadata=None, outputs_writer=None, extensions=[]):
def __init__(self, outputs_writer=None, metadata=None, attachment=None, extensions=[]):
self.token_address = None
self.metadata = metadata
self.extensions = extensions
self.cores = {
'metadata': metadata,
'attachment': attachment,
}
self.outputs = []
self.__outputs_writer = outputs_writer
@@ -18,15 +21,6 @@ class Processor:
return self.__outputs_writer
def can_process(self):
return self.token_address != None
def process_metadata(self, writer=None):
if not self.can_process():
raise RuntimeError('incomplete processing state for metadata')
def get_outputs(self):
outputs = []
for ext in self.extensions:
@@ -38,13 +32,14 @@ class Processor:
def process(self):
tasks = [
'metadata',
'attachment',
]
for ext in self.extensions:
token_address = ext.process()
for task in tasks:
a = getattr(self, task)
a = self.cores.get(task)
if a == None:
logg.debug('skipping missing task receiver "{}"'.format(task))
continue
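A hedged sketch of the new dispatch in Processor.process(): each task name is looked up in self.cores and skipped with a log message when no receiver was supplied. MockExt and MockMeta are hypothetical stand-ins, modelled on the process() hooks that Attachment and Meta expose above; how the dispatcher invokes the receiver past the continue is not shown in this hunk, so the hook is kept permissive.

from cic.output import StdoutWriter
from cic.processor import Processor

class MockExt:
    def process(self):
        # pretend the extension deployed a token and returns its address
        return '0x' + '00' * 20

class MockMeta:
    def process(self, token_address=None, writer=None):
        # tolerant hook: accept whatever the dispatcher passes
        print('metadata task for', token_address)

p = Processor(outputs_writer=StdoutWriter(), metadata=MockMeta(), extensions=[MockExt()])
p.process()   # runs the metadata task, logs and skips the missing attachment task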

View File

@@ -16,11 +16,12 @@ logg = logging.getLogger(__name__)
class Proof(Data):
def __init__(self, path='.', attachments=None):
def __init__(self, path='.', attachments=None, writer=None):
super(Proof, self).__init__()
self.namespace = 'ge'
self.description = None
self.path = path
self.writer = writer
self.extra_attachments = attachments
self.attachments = {}
self.proof_path = os.path.join(self.path, 'proof.json')

View File

@@ -6,6 +6,7 @@ import sys
import importlib
# external imports
import chainlib.cli
import cic.cmd.init as cmd_init
import cic.cmd.show as cmd_show
import cic.cmd.export as cmd_export
@@ -15,11 +16,10 @@ logg = logging.getLogger()
script_dir = os.path.dirname(os.path.realpath(__file__))
data_dir = os.path.join(script_dir, '..', 'data')
base_config_dir = os.path.join(data_dir, 'config')
schema_dir = os.path.join(script_dir, '..', 'schema')
argparser = argparse.ArgumentParser(description='CIC cli tool for generating and publishing tokens')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
argparser = chainlib.cli.ArgumentParser(env=os.environ, description='CIC cli tool for generating and publishing tokens')
sub = argparser.add_subparsers()
sub.dest = 'command'
@@ -31,11 +31,7 @@ sub_export = sub.add_parser('export', help='export cic data directory state to a
cmd_export.process_args(sub_export)
args = argparser.parse_args(sys.argv[1:])
if args.v == True:
logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
logging.getLogger().setLevel(logging.DEBUG)
config = chainlib.cli.Config.from_args(args, base_config_dir=base_config_dir)
if args.command == None:
logg.critical('Subcommand missing')
@@ -45,7 +41,6 @@ modname = 'cic.cmd.{}'.format(args.command)
logg.debug('using module {}'.format(modname))
cmd_mod = importlib.import_module(modname)
config = None
def main():
#try:

View File

@@ -1,2 +1,4 @@
funga>=0.5.0a1,<0.6.0
cic-types>=0.2.0a1,<=0.2.0
confini>=0.4.2rc3,<0.5.0
chainlib>=0.0.10a1,<0.1.0

View File

@@ -38,7 +38,7 @@ class TestCICBase(unittest.TestCase):
self.outputs_dir = tempfile.mkdtemp()
self.outputs_writer = KVWriter(self.outputs_dir)
self.core_processor = Processor(self.token_address, outputs_writer=self.outputs_writer)
self.core_processor = Processor(outputs_writer=self.outputs_writer)
self.resources = {
'token': {

View File

@@ -89,8 +89,8 @@ class TestCICEthBase(EthTesterCase):
self.proofs = Proof(proof_dir, attachments=attach)
self.proofs.load()
d = tempfile.mkdtemp()
self.outputs_writer = KVWriter(d)
self.outputs_dir = tempfile.mkdtemp()
self.outputs_writer = KVWriter(self.outputs_dir)
class TestCICEthTokenBase(TestCICEthBase):

View File

@@ -1,6 +1,7 @@
# standard imports
import unittest
import logging
import os
# local imports
from cic.ext.eth import CICEth
@@ -18,7 +19,8 @@ class TestCICEthOffline(TestCICEthBase):
def setUp(self):
super(TestCICEthOffline, self).setUp()
self.adapter = CICEth(self.chain_spec, self.resources, self.proofs)
self.core_processor = Processor(outputs_writer=self.outputs_writer, extensions=[self.adapter])
self.first_proof = self.proofs.get()[0]
#self.core_processor = Processor(outputs_writer=self.outputs_writer, extensions=[self.adapter])
def test_offline_token_index(self):
@@ -31,11 +33,28 @@ def test_offline_address_declarator(self):
def test_offline_address_declarator(self):
self.adapter.token_address = self.token_address
self.adapter.process_address_declarator()
self.assertEqual(self.adapter.outputs[0][0], 'address_declarator')
first_proof = self.proofs.get()[0]
self.assertEqual(self.adapter.outputs[0][0], 'address_declarator_' + self.first_proof)
self.assertEqual(self.adapter.outputs[0][1][:8], 'ae47ece0')
self.assertEqual(len(self.adapter.outputs), 3)
def test_offline_writer(self):
self.adapter.outputs_writer = self.outputs_writer
self.adapter.token_address = self.token_address
self.adapter.process_address_declarator()
self.assertEqual(self.adapter.outputs[0][0], 'address_declarator_' + self.first_proof)
self.assertEqual(self.adapter.outputs[0][1][:8], 'ae47ece0')
self.assertEqual(len(self.adapter.outputs), 3)
proofs = self.proofs.get()
for i, v in enumerate(self.adapter.outputs):
fp = os.path.join(self.outputs_dir, v[0])
f = open(fp, 'rb')
r = f.read()
f.close()
self.assertEqual(r.decode('utf-8'), v[1])
if __name__ == '__main__':
unittest.main()

View File

@@ -19,7 +19,7 @@ logg = logging.getLogger()
class TestCICOutput(TestCICBase):
def test_output_file(self):
self.outputs_writer.write('foo', 'bar')
self.outputs_writer.write('foo', b'bar')
fp = os.path.join(self.outputs_dir, 'foo')
f = open(fp, 'r')
v = f.read()

View File

@@ -9,6 +9,7 @@ from hexathon import strip_0x
# local imports
from cic.processor import Processor
from cic.attachment import Attachment
from cic.meta import Meta
# test imports
@@ -31,7 +32,7 @@ class MockExt:
class TestCICProcessor(TestCICBase):
def test_processor(self):
def test_processor_meta(self):
fp = os.path.join(test_data_dir, 'proof')
m = Meta(fp)
m.load()
@@ -49,5 +50,18 @@ class TestCICProcessor(TestCICBase):
self.assertEqual(m.asdict(), o)
def test_processor_attachment(self):
fp = os.path.join(test_data_dir, 'proof')
m = Attachment(fp)
m.load()
mock_ext = MockExt(self.token_address)
p = Processor(attachment=m, outputs_writer=self.outputs_writer, extensions=[mock_ext])
p.process()
for k in list(m.contents.keys()):
fp = os.path.join(self.outputs_dir, k)
os.stat(fp)
if __name__ == '__main__':
unittest.main()