docstrings for eth extension

This commit is contained in:
nolash 2021-11-29 19:15:10 +01:00
parent 8f78e80bb1
commit 6e5a80ae7c
Signed by: lash
GPG Key ID: 21D2E7BB88C2A746
3 changed files with 33 additions and 3 deletions


@@ -129,15 +129,32 @@ class CICEth(Extension):
     def add_outputs(self, k, v):
+        """Adds given key/value pair to outputs array.
+
+        :param k: Output key
+        :type k: str
+        :param v: Output value
+        :type v: bytes or str
+        """
         logg.debug('adding outputs {} {}'.format(k, v))
         self.outputs.append((k, v))

     def get_outputs(self):
+        """Get wrapper for outputs captured from processing.
+
+        :rtype: list of tuples
+        :return: Captured outputs
+        """
         return self.outputs

     def process_token(self, writer=None):
+        """Deploy token, and optionally mint token supply to token deployer account.
+
+        :param writer: Writer interface receiving the output of the processor step
+        :type writer: cic.output.OutputWriter
+        """
         if writer == None:
             writer = self.outputs_writer
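
The outputs list is a simple ordered capture of (key, value) pairs produced by the processor steps. A minimal standalone sketch of that capture pattern as the docstrings describe it; the OutputCapture name is illustrative, not part of the codebase:

class OutputCapture:
    # Mirrors CICEth's outputs handling: keys are str, values bytes or str.
    def __init__(self):
        self.outputs = []

    def add_outputs(self, k, v):
        # Append the key/value pair, as CICEth.add_outputs does.
        self.outputs.append((k, v))

    def get_outputs(self):
        # Return the captured (key, value) tuples in insertion order.
        return self.outputs

c = OutputCapture()
c.add_outputs('token_address', b'\xde\xad\xbe\xef')
assert c.get_outputs() == [('token_address', b'\xde\xad\xbe\xef')]
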
@@ -200,6 +217,11 @@ class CICEth(Extension):
     def process_token_index(self, writer=None):
+        """Register deployed token with token index.
+
+        :param writer: Writer interface receiving the output of the processor step
+        :type writer: cic.output.OutputWriter
+        """
         if writer == None:
             writer = self.outputs_writer
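
All of the process_* steps share the writer fallback visible in the context lines: output goes to a caller-supplied writer for that step, otherwise to the instance default. A sketch of the pattern with a duck-typed writer; the write() signature here is an assumption, not the actual cic.output.OutputWriter interface:

class ListWriter:
    # Collects writes in memory; stands in for an output writer.
    def __init__(self):
        self.written = []

    def write(self, k, v):  # signature is an assumption
        self.written.append((k, v))

class Step:
    def __init__(self, outputs_writer):
        self.outputs_writer = outputs_writer

    def process(self, writer=None):
        # Fall back to the default writer, as the process_* methods do.
        if writer is None:
            writer = self.outputs_writer
        writer.write('step', b'\x00')

step = Step(ListWriter())
override = ListWriter()
step.process(writer=override)  # the per-call override wins
assert override.written == [('step', b'\x00')]
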
@@ -228,6 +250,11 @@ class CICEth(Extension):
     def process_address_declarator(self, writer=None):
+        """Register token proofs with address declarator.
+
+        :param writer: Writer interface receiving the output of the processor step
+        :type writer: cic.output.OutputWriter
+        """
         if writer == None:
             writer = self.outputs_writer
@@ -268,6 +295,8 @@ class CICEth(Extension):
     def prepare_extension(self):
+        """Sets token address for extension if defined in settings.
+        """
         super(CICEth, self).prepare_extension()

         if self.token_address != None:
@ -275,4 +304,8 @@ class CICEth(Extension):
def new(chain_spec, resources, proof, signer_hint=None, rpc=None, outputs_writer=None): def new(chain_spec, resources, proof, signer_hint=None, rpc=None, outputs_writer=None):
"""Convenience function to enable object instantiation through predictable module symbol
See CICEth
"""
return CICEth(chain_spec, resources, proof, signer=signer_hint, rpc=rpc, outputs_writer=outputs_writer) return CICEth(chain_spec, resources, proof, signer=signer_hint, rpc=rpc, outputs_writer=outputs_writer)
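
The point of a module-level new() is that callers never need the class name: any extension module exposing the same symbol can be loaded by name and instantiated uniformly. A sketch under that assumption; the module path 'cic.ext.eth' is a guess, and the arguments are placeholders:

import importlib

# Assumed module path for this extension; only new()'s signature is
# taken from the diff above.
mod = importlib.import_module('cic.ext.eth')

# Placeholder arguments; in real use these come from chain and file config.
chain_spec = None
resources = None
proof = None
ext = mod.new(chain_spec, resources, proof, signer_hint=None, rpc=None, outputs_writer=None)
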


@@ -20,7 +20,6 @@ test_data_dir = os.path.join(test_base_dir, 'testdata')

 proof_hash = '0f6fc017f29caf512c0feaaf83bc10614b488311cace2973dc248dc24b01e04f'
 foo_hash = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'
 bar_hash = 'fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9'
-#root_merged_hash = '4bd0ad4305a5fee20fb80e179a437c296f6a769ca376d746a3848a80e9b7a1a6'
 root_merged_hash = '795fed550ada0ec1eea4309a282f5910bc3bdb3a9762c7d9cc25d6de71c45096'
 root_unmerged_hash = '5dc81e51703e624f498663e7d5d70429b824e9ff60f92b61fe47eb6862a971b4'
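
Of these fixtures, the foo and bar values are plain SHA-256 digests of the literal strings, so they can be checked directly; the proof and root hashes depend on test data not shown in this diff:

import hashlib

assert hashlib.sha256(b'foo').hexdigest() == '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'
assert hashlib.sha256(b'bar').hexdigest() == 'fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9'
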


@@ -43,8 +43,6 @@ class TestCICEthBase(EthTesterCase):
         random.seed(42)

         f = open('/dev/urandom', 'rb')
-        #self.initial_description = add_0x(random.randbytes(32).hex())
-        #self.token_address = add_0x(random.randbytes(20).hex())
         initial_description_bytes = f.read(32)
         self.initial_description = add_0x(initial_description_bytes.hex())
         token_address_bytes = f.read(20)
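
The removed commented-out lines relied on random.randbytes(), which only exists from Python 3.9; reading /dev/urandom works but is Unix-only. os.urandom produces the same kind of fixture bytes portably. A sketch, assuming add_0x comes from hexathon as used in the surrounding test module:

import os
from hexathon import add_0x  # assumption: the add_0x used above is hexathon's

initial_description = add_0x(os.urandom(32).hex())  # 32 random bytes, 0x-prefixed hex
token_address = add_0x(os.urandom(20).hex())  # 20 bytes, the length of an EVM address
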