Compare commits


4 Commits

Author SHA1 Message Date
fb3253ae55 save 2022-02-21 08:34:25 +03:00
d3f65798f1 feat: add interactive token deployment 2022-02-10 11:22:21 +03:00
1498edbb07 Adds capability to deploy demurrage tokens. 2021-12-15 16:00:51 +03:00
10cbb1344d Cosmetic clean ups. 2021-12-15 16:00:18 +03:00
78 changed files with 1992 additions and 5009 deletions

View File

@ -1,2 +0,0 @@
[report]
omit =

View File

@ -1,65 +0,0 @@
---
################
# Test #
################
kind: pipeline
name: default
type: docker
steps:
# Run tests against Python with pytest
- name: test
image: python:3.8
commands:
# Install dependencies
- pip install poetry
- poetry install -E eth
- poetry run pylint cic --fail-under=8.00
- poetry run pytest
environment:
LOGLEVEL: info
volumes:
- name: poetry_cache
path: /root/.cache/pypoetry
- name: pip_cache
path: /root/.cache/pip
when:
event:
- push
- name: publish
image: python:3.8
commands:
# Install dependencies
- pip install poetry
- poetry install
- poetry run semantic-release publish
depends_on:
- test
when:
branch:
- master
event:
- push
environment:
LOGLEVEL: info
GIT_SSL_NO_VERIFY: 1
REPOSITORY_USERNAME: __token__
REPOSITORY_PASSWORD:
from_secret: pypi_token
GITEA_TOKEN:
from_secret: gitea_token
volumes:
- name: poetry_cache
path: /root/.cache/pypoetry
- name: pip_cache
path: /root/.cache/pip
volumes:
- name: poetry_cache
host:
path: /tmp/cache/drone/pypoetry
- name: pip_cache
host:
path: /tmp/cache/drone/pip

7
.gitignore vendored
View File

@ -2,11 +2,8 @@ __pycache__
*.pyc *.pyc
*.egg-info *.egg-info
.venv .venv
build/ build
.vscode .vscode
.idea .idea
contracts contracts
*.egg *.egg
.coverage
deployments/
dist/

View File

@ -59,7 +59,7 @@ confidence=
# #
# Kubeflow disables string-interpolation because we are starting to use f # Kubeflow disables string-interpolation because we are starting to use f
# style strings # style strings
disable=old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,missing-docstring,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,relative-import,invalid-name,bad-continuation,no-member,locally-disabled,fixme,import-error,too-many-locals,no-name-in-module,too-many-instance-attributes,no-self-use,logging-fstring-interpolation disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,missing-docstring,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,relative-import,invalid-name,bad-continuation,no-member,locally-disabled,fixme,import-error,too-many-locals,no-name-in-module,too-many-instance-attributes,no-self-use,logging-fstring-interpolation
[REPORTS] [REPORTS]

4
CHANGELOG Normal file
View File

@ -0,0 +1,4 @@
- 0.0.2
* Add executable entry point in package install
- 0.0.1
* Token creation setup for eth

View File

@ -1,104 +0,0 @@
# Changelog
<!--next-version-placeholder-->
## v0.5.5 (2023-03-24)
### Fix
* Lock erc20-demurrage-token ([`3cde79e`](https://git.grassecon.net/cicnet/cic-cli/commit/3cde79ef8fb77a6b03454e675568834e0ab4ba80))
## v0.5.4 (2022-07-05)
### Fix
* Pass headers through KeyedWriterFactory ([`de78753`](https://git.grassecon.net/cicnet/cic-cli/commit/de78753675242dd253359a5a5601d9062d81f0ee))
## v0.5.3 (2022-07-05)
### Fix
* Add auth headers to HTTPWriter ([`4eda0fb`](https://git.grassecon.net/cicnet/cic-cli/commit/4eda0fb5cc2c41a735619dc3e34f21c4e27fd112))
## v0.5.2 (2022-07-05)
### Fix
* Bump cic-types ([`15ae114`](https://git.grassecon.net/cicnet/cic-cli/commit/15ae1143a5230078219072d096741546ebcc3d07))
## v0.5.1 (2022-07-05)
### Fix
* Upgrade cic-types to support meta auth ([`22b3062`](https://git.grassecon.net/cicnet/cic-cli/commit/22b3062c4909400664bd2a50ca36d5ee737531a1))
## v0.5.0 (2022-07-04)
### Feature
* Add meta-auth ([#4](https://git.grassecon.net/cicnet/cic-cli/issues/4)) ([`bfe7086`](https://git.grassecon.net/cicnet/cic-cli/commit/bfe7086178f3fc2743dd68cc20c5459ca466ae8e))
## v0.4.1 (2022-06-14)
### Fix
* Bump deps ([`e36ea4b`](https://git.grassecon.net/cicnet/cic-cli/commit/e36ea4bcfb1c417d1adf2be9455cb20b23323414))
## v0.4.0 (2022-04-29)
### Feature
* Add giftable generation ([`b7acbdc`](https://git.grassecon.net/cicnet/cic-cli/commit/b7acbdc4bc5862752585fecfaee7d2fe70d8dbbe))
## v0.3.4 (2022-04-27)
### Fix
* Bump deps again ([`37188a6`](https://git.grassecon.net/cicnet/cic-cli/commit/37188a60e85d9545acfd950c1c160801c22d2b5b))
## v0.3.3 (2022-04-26)
### Fix
* It's ok if you already exsist ([`38cfb18`](https://git.grassecon.net/cicnet/cic-cli/commit/38cfb185270fb361ff5d9da9976745e1fecc40f8))
* Take the reins off ([`c84517e`](https://git.grassecon.net/cicnet/cic-cli/commit/c84517e3db264f541e6e5a8eef30703bf28d32d0))
* Bump deps ([`dcea763`](https://git.grassecon.net/cicnet/cic-cli/commit/dcea763ce5b3d542ed0a50586720fc3a45142e77))
* **attachement:** Directory not getting created ([`e55b82f`](https://git.grassecon.net/cicnet/cic-cli/commit/e55b82f5295397b3e4123297bc6b231ca251bc83))
## v0.3.2 (2022-04-26)
### Fix
* Update deps ([`d2e55fa`](https://git.grassecon.net/cicnet/cic-cli/commit/d2e55fad0efd13fa7a1de8ed8ab43e703a4aa046))
## v0.3.1 (2022-04-26)
### Fix
* Throw if directory exsists ([`5f22220`](https://git.grassecon.net/cicnet/cic-cli/commit/5f22220825f5c485550ca9a21a54598fbe3b3ba3))
## v0.3.0 (2022-04-26)
### Feature
* **wizard:** Add csv input flag ([`a9f97a9`](https://git.grassecon.net/cicnet/cic-cli/commit/a9f97a9a5c6908e4d51710e3b121764d2511c0ab))
### Fix
* Tests ([`f300767`](https://git.grassecon.net/cicnet/cic-cli/commit/f30076783d5fc93d91d29e9343d62af4c0fdffaa))
* Bump ci ([`60e8ecc`](https://git.grassecon.net/cicnet/cic-cli/commit/60e8ecc41a472dbea25c36d869259c8161145002))
## v0.2.3 (2022-03-22)
### Fix
* Remove this ([`92794a2`](https://git.grassecon.net/cicnet/cic-cli/commit/92794a2e3b2fc5ace63f519bbe5b23c542afc853))
## v0.2.2 (2022-03-22)
### Fix
* Enfore upper case symbol name ([`71bf1e1`](https://git.grassecon.net/cicnet/cic-cli/commit/71bf1e15c4a217111ae6f6568814985a9d5b960f))
### Documentation
* Update bange urls ([`32ba293`](https://git.grassecon.net/cicnet/cic-cli/commit/32ba29354ae53bf8166bef4d117667aa314a6cfe))
## v0.2.1 (2022-03-16)
### Fix
* Update config paths ([`f7d0503`](https://git.grassecon.net/cicnet/cic-cli/commit/f7d0503c7b85d96588bf1a75fdf1cce27acf1460))
## v0.2.0 (2022-03-16)
### Feature
* Copy base configs to user configs ([`f4e370c`](https://git.grassecon.net/cicnet/cic-cli/commit/f4e370cb5db79c74abe26179f5b15bd079bdd066))
## v0.1.1 (2022-03-16)
### Fix
* Update configs ([`b51d1e9`](https://git.grassecon.net/cicnet/cic-cli/commit/b51d1e92d7ae1e3b91ca50c036ffd58e762df24b))
## v0.1.0 (2022-03-16)
### Feature
* Add interactive deployment and switch to poetry' ([#2](https://git.grassecon.net/cicnet/cic-cli/issues/2)) ([`0fcf2eb`](https://git.grassecon.net/cicnet/cic-cli/commit/0fcf2eb3bc807111db02e9e47e469ec0a965797f))
* **wizard:** Add ability to select wallet address ([`556366a`](https://git.grassecon.net/cicnet/cic-cli/commit/556366a93384bba51aa617d54bcf50f4473b790a))
* Add token symbol proof metadata references ([`a707f12`](https://git.grassecon.net/cicnet/cic-cli/commit/a707f120865186c8e4a7840d53c9dcf5f4257ab3))
### Fix
* Add getpass ([`47a9b25`](https://git.grassecon.net/cicnet/cic-cli/commit/47a9b259ae54c34df9af4aa1fb176070d305296a))
* Incorrect var name ([`41dbd5a`](https://git.grassecon.net/cicnet/cic-cli/commit/41dbd5a400287d4687d0830017466b9a43054ecf))
* **ext:** Allow loading chain_spec from config ([`1d4b051`](https://git.grassecon.net/cicnet/cic-cli/commit/1d4b0512ad65b4d2903bd7d022e562cda158a592))
* Change name to cic-cli ([`40e386d`](https://git.grassecon.net/cicnet/cic-cli/commit/40e386db1175839394f2480a1a3e1bbfc52edea9))
* Add missing json import ([`48ee805`](https://git.grassecon.net/cicnet/cic-cli/commit/48ee8050c17edb21b0dc4065bf0018b1502d4a8c))
* Broken imports ([`4f219e3`](https://git.grassecon.net/cicnet/cic-cli/commit/4f219e3d1853befa197f46a19dc8a8a76ef26811))
### Documentation
* Add cluter deployment info ([`941b9e6`](https://git.grassecon.net/cicnet/cic-cli/commit/941b9e6b650163c4f35e4b08203fb10c9309ee91))
* Rename prod config to testnet ([`67f947a`](https://git.grassecon.net/cicnet/cic-cli/commit/67f947a9af16dc01fb68459a51629320264d281f))
* Add badge ([`be5d988`](https://git.grassecon.net/cicnet/cic-cli/commit/be5d988fa4d03dfcd44f71c7c6d4a562b780da09))

137
README.md
View File

@ -1,25 +1,26 @@
# CIC Token Deployment Tool # CIC token deployment tool
[![Status](https://ci.grassecon.net/api/badges/cicnet/cic-cli/status.svg)](https://ci.grassecon.net/grassrootseconomics/cic)
[![Version](https://img.shields.io/pypi/v/cic-cli?color=green)](https://pypi.org/project/cic/)
CIC-CLI provides tooling to generate and publish metadata in relation to CIC-CLI provides tooling to generate and publish metadata in relation to
token deployments. token deployments.
```shell To install the project (replacing \<VERSION> with the current version:
pip install cic-cli[eth] 0.0.1):
```
## Usage
### Using the wizard
First make sure that you edit the configs below to add your paths for `[auth]keyfile_path` and `[wallet]keyfile`
The configs are located in `~/.config/cic/cli/config/`
```
# Local
cic wizard ./somewhere -c ~/.config/cic/cli/config/docker
# Test Net ```shell
cic wizard ./somewhere -c ~/.config/cic/cli/config/testnet python setup.py sdist
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 dist/cic-<VERSION>.tar.gz
``` ```
### Modular
## Structure of the components
![image](./doc/sphinx/components.svg)
CIC-CLI is designed to interface any network type backend. The current
state of the package contains interface to EVM only. Thus, the examples
below are limited to the context of the EVM.
## Preparing for EVM token deployment
Some of the concepts described below assume familiarity with base Some of the concepts described below assume familiarity with base
concepts of the CIC architecture. Please refer to the appropriate concepts of the CIC architecture. Please refer to the appropriate
documentation for more information. documentation for more information.
@ -36,66 +37,82 @@ To automatically fill in settings detected in the network for the EVM:
cic ext --registry <contract_registry_address> -d <settings_folder> -i <chain_spec> -p <rpc_endpoint> eth cic ext --registry <contract_registry_address> -d <settings_folder> -i <chain_spec> -p <rpc_endpoint> eth
``` ```
## Structure of the components
![image](./doc/sphinx/components.svg)
CIC-CLI is designed to interface any network type backend. The current
state of the package contains interface to EVM only. Thus, the examples
below are limited to the context of the EVM.
## Development ## Development
### Requirements
- Install [poetry](https://python-poetry.org/docs/#installation)
### Setup ### Setup
``` ```
poetry install -E eth python3 -m venv ./.venv
source ./.venv/bin/activate
pip install --extra-index-url https://pip.grassrootseconomics.net -r requirements.txt -r test_requirements.txt eth_requirements.txt
``` ```
### Running the CLI ### Running the CLI
```bash ```bash
poetry run cic -h python -m cic.runnable.cic_cmd -h # Help
``` ```
```bash ### Deploying Token
poetry run cic wizard ./somewhere -c ./config/docker
1. Generate Token Template
``` ```
### Importing a wallet from metamask python -m cic.runnable.cic_cmd init --target eth --name "Foo Token" --symbol FOO --precision 6 foo
- Export the accounts private key [Instructions](https://metamask.zendesk.com/hc/en-us/articles/360015289632-How-to-Export-an-Account-Private-Key)
- Save the private key to a file
- Run `eth-keyfile -k <file> > ~/.config/cic/keystore/keyfile.json`
### Port Forwarding
<details>
<summary>Install Kubectl</summary>
```bash
sudo apt-get update
sudo apt-get install -y apt-transport-https ca-certificates curl
sudo curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packages.cloud.google.com/apt/doc/apt-key.gpg
echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | sudo tee /etc/apt/sources.list.d/kubernetes.list
sudo apt-get update
sudo apt-get install -y kubectl
``` ```
</details>
- Download testnet cluster config from https://cloud.digitalocean.com/kubernetes/clusters 2. Populating network.json
- Move the config to `$HOME/.kube/` Ea6225212005E86a4490018Ded4bf37F3E772161
- Run `kubectl -n grassroots --kubeconfig=$HOME/.kube/<config_file_name>.yaml get pods` ```
- Copy the name of the meta pod (e.g `cic-meta-server-67dc7c6468-8rhdq`) python -m cic.runnable.cic_cmd ext -p http://localhost:63545 -i evm:byzantium:8996:bloxberg --registry 0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299 -d foo eth
- Port foward the meta pod to the local machine using `kubectl port-forward pods/<name_of_meta_pod> 6700:8000 -n grassroots --kubeconfig=$HOME/.kube/<config_file_name>.yaml` ```
- Clone this repository to your local machine
- Run `poetry install -E eth` in the repo root add eb3907ecad74a0013c259d5874ae7f22dcbcc95c from stack/apps/contact_migrations/keystore:address to foo.network.resources.eth.contents.\*.key_account
- Open `./cic/config/testnet/config.ini` and change
- [auth]keyfile_path Fill out proof.json
- [wallet]key_file
- Open a new terminal and run `poetry run cic wizard -c ./cic/config/testnet ./somewhere` ```
{
"description": "Smoking is bad for your health",
"issuer": "William Luke",
"namespace": "ge",
"proofs": [],
"version": 0
}
```
Fill out token.json
[eth-erc20](https://gitlab.com/cicnet/eth-erc20)
[erc20-demurrage-token](https://gitlab.com/cicnet/erc20-demurrage-token)
```
{
"code": "/home/will/grassroots/eth-erc20/python/giftable_erc20_token/data/GiftableToken.bin",
"extra": [
{
"arg": "",
"arg_type": ""
}
],
"name": "Foo Token",
"precision": "6",
"supply": 1000000000000000000000000,
"symbol": "FOO"
}
```
Deploy
0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299
```
python -m cic.runnable.cic_cmd export --metadata-endpoint http://localhost:63380 -vv -y /home/will/grassroots/cic-internal-integration/apps/contract-migration/keystore -o out -d foo eth
```
eth-contract-registry-list -e 0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299 -u
Use TokenRegistery for e
eth-token-index-list -p http://localhost:63545 -i evm:byzantium:8996:bloxberg -e eb3907ecad74a0013c259d5874ae7f22dcbcc95c -u
### Tests ### Tests
``` ```
poetry run pytest bash ./run_tests.sh
``` ```

101
cic/MetaRequestHandler.py Normal file
View File

@ -0,0 +1,101 @@
from __future__ import annotations
# standard imports
import json
import logging
import os
from typing import TYPE_CHECKING, Dict, Union
from cic_types.condiments import MetadataPointer
from cic_types.ext.metadata.signer import Signer
from cic_types.processor import generate_metadata_pointer
if TYPE_CHECKING:
from cic.cmd.arg import CmdCtrl
from cic.http import HTTPSession
# local imports
# external imports
logg = logging.getLogger(__file__)
class Metadata:
"""
:cvar base_url: The base url or the metadata server.
:type base_url: str
"""
base_url = None
ctrl: CmdCtrl = None
class MetadataRequestsHandler(Metadata):
def __init__(
self,
cic_type: MetadataPointer,
identifier: bytes,
engine: str = "pgp",
):
""""""
logg.debug(f"ctrl: {self.ctrl}")
self.opener: HTTPSession = self.ctrl.remote_openers["meta"]
self.cic_type = cic_type
self.engine = engine
self.headers = {"X-CIC-AUTOMERGE": "server", "Content-Type": "application/json"}
self.identifier = identifier
if cic_type == MetadataPointer.NONE:
self.metadata_pointer = identifier.hex()
else:
self.metadata_pointer = generate_metadata_pointer(
identifier=self.identifier, cic_type=self.cic_type
)
if self.base_url:
self.url = os.path.join(self.base_url, self.metadata_pointer)
def create(self, data: Union[Dict, str]):
""""""
data = json.dumps(data).encode("utf-8")
result = self.opener.open(
method="POST", url=self.url, data=data, headers=self.headers
)
logg.debug(
f"url: {self.url}, data: {data}, headers: {self.headers}, result: {result}"
)
metadata = json.loads(result)
return self.edit(data=metadata)
def edit(self, data: Union[Dict, str]):
""""""
cic_meta_signer = Signer()
signature = cic_meta_signer.sign_digest(data=data)
algorithm = cic_meta_signer.get_operational_key().get("algo")
formatted_data = {
"m": json.dumps(data),
"s": {
"engine": self.engine,
"algo": algorithm,
"data": signature,
"digest": data.get("digest"),
},
}
formatted_data = json.dumps(formatted_data).encode("utf-8")
result = self.opener.open(
method="PUT", url=self.url, data=formatted_data, headers=self.headers
)
logg.info(f"signed metadata submission returned: {result}.")
try:
decoded_identifier = self.identifier.decode("utf-8")
except UnicodeDecodeError:
decoded_identifier = self.identifier.hex()
return result
def query(self):
""""""
result = self.opener.open(method="GET", url=self.url)
result_data = json.loads(result)
if not isinstance(result_data, dict):
raise ValueError(f"invalid result data object: {result_data}.")
return result
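
The handler above relies on class attributes (`base_url`, `ctrl`) being set before use, which `cic/actions/deploy.py` further down does. A minimal usage sketch, assuming that wiring is already in place; the `MetadataPointer` member chosen here is illustrative, not prescribed by this diff:

```python
# Hypothetical sketch; assumes deploy() has already set
# MetadataRequestsHandler.base_url and MetadataRequestsHandler.ctrl.
from cic_types.condiments import MetadataPointer
from cic.MetaRequestHandler import MetadataRequestsHandler

handler = MetadataRequestsHandler(
    cic_type=MetadataPointer.TOKEN_META_SYMBOL,  # illustrative pointer type
    identifier=b"FOO",
)
handler.create({"name": "Foo Token"})  # POST, then signed PUT via edit()
current = handler.query()              # GET the stored metadata back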

View File

@ -1 +1,2 @@
__version__ = "0.5.5" from .proof import Proof
from .processor import Processor

96
cic/actions/deploy.py Normal file
View File

@ -0,0 +1,96 @@
from __future__ import annotations
# standard imports
import importlib
import logging
import os
from typing import TYPE_CHECKING
# local imports
from cic import Processor, Proof
from cic.attachment import Attachment
from cic.meta import Meta, MetadataWriter
from cic.network import Network
from cic.output import HTTPWriter, KeyedWriterFactory
from cic.token import Token
# external imports
from cic.MetaRequestHandler import MetadataRequestsHandler
from cic_types.ext.metadata.signer import Signer as MetadataSigner
logg = logging.getLogger(__name__)
if TYPE_CHECKING:
from cic.cmd.arg import CmdCtrl
from cic.actions.types import Options
def init_writers_from_config(config):
w = {
'meta': None,
'attachment': None,
'proof': None,
'ext': None,
}
for v in w.keys():
k = 'CIC_CORE_{}_WRITER'.format(v.upper())
(d, c) = config.get(k).rsplit('.', maxsplit=1)
m = importlib.import_module(d)
o = getattr(m, c)
w[v] = o
return w
def deploy(ctrl: CmdCtrl, target: str, contract_directory: str, keystore_directory: str, options: Options):
auth_passphrase=options.auth_passphrase,
auth_key_file_path=options.auth_keyfile_path,
metadata_endpoint=options.metadata_endpoint,
modname = f'cic.ext.{target}'
cmd_mod = importlib.import_module(modname)
writers = init_writers_from_config(ctrl.config)
output_directory = os.path.join(contract_directory, 'out')
output_writer_path_meta = output_directory
if metadata_endpoint != None:
MetadataRequestsHandler.base_url = metadata_endpoint
MetadataRequestsHandler.ctrl = ctrl
MetadataSigner.gpg_path = '/tmp'
MetadataSigner.key_file_path = auth_key_file_path # This is a p2p key for add data to meta
MetadataSigner.gpg_passphrase = auth_passphrase
writers['proof'] = KeyedWriterFactory(MetadataWriter, None).new
writers['attachment'] = KeyedWriterFactory(None, HTTPWriter).new
writers['meta'] = MetadataWriter
output_writer_path_meta = metadata_endpoint
ct = Token(path=contract_directory)
cm = Meta(path=contract_directory, writer=writers['meta'](path=output_writer_path_meta))
ca = Attachment(path=contract_directory, writer=writers['attachment'](path=output_writer_path_meta))
cp = Proof(path=contract_directory, attachments=ca, writer=writers['proof'](path=output_writer_path_meta))
cn = Network(path=contract_directory)
ca.load()
ct.load()
cp.load()
cm.load()
cn.load()
chain_spec = None
try:
chain_spec = ctrl.config.get('CHAIN_SPEC')
except KeyError:
chain_spec = cn.chain_spec
ctrl.config.add(chain_spec, 'CHAIN_SPEC', exists_ok=True)
logg.debug(f'CHAIN_SPEC config set to {str(chain_spec)}')
(rpc, signer) = cmd_mod.parse_adapter(ctrl.config, keystore_directory)
target_network_reference = cn.resource(target)
chain_spec = cn.chain_spec(target)
logg.debug(f'found reference {target_network_reference["contents"]} chain spec {chain_spec} for target {target}')
c = getattr(cmd_mod, 'new')(chain_spec, target_network_reference['contents'], cp, signer_hint=signer, rpc=rpc, outputs_writer=writers['ext'](path=output_directory))
c.apply_token(ct)
p = Processor(proof=cp, attachment=ca, metadata=cm, extensions=[c])
p.process()
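
`init_writers_from_config` above resolves each `CIC_CORE_*_WRITER` config value as a dotted path to a writer class. A minimal sketch of that lookup, reusing the `cic.writers.KVWriter` value that appears in the `[cic_core]` config sections further down (assuming that module is importable in the installed package):

```python
# Minimal sketch of the lookup performed by init_writers_from_config().
import importlib

value = "cic.writers.KVWriter"            # e.g. config.get("CIC_CORE_META_WRITER")
module_path, class_name = value.rsplit(".", maxsplit=1)
writer_cls = getattr(importlib.import_module(module_path), class_name)
```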

28
cic/actions/types.py Normal file
View File

@ -0,0 +1,28 @@
from collections import namedtuple
Contract = namedtuple(
"Contract",
[
"token",
"proof",
"meta",
"attachment",
"network",
],
)
Options = namedtuple(
"Options",
[
"auth_db_path",
"auth_keyfile_path",
"auth_passphrase",
"contract_registry",
"key_account",
"chain_spec",
"rpc_provider",
"metadata_endpoint",
"wallet_keyfile",
"wallet_passphrase",
],
)
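
`Options` is a plain namedtuple, so it can also be built directly with keyword arguments instead of going through `get_options` in `cic/cmd/easy.py`. A sketch with placeholder values:

```python
from cic.actions.types import Options

# All values below are hypothetical placeholders.
opts = Options(
    auth_db_path="/home/user/.local/share/cic/clicada",
    auth_keyfile_path="/home/user/.config/cic/keyfile.asc",
    auth_passphrase=None,
    contract_registry="0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299",
    key_account="eb3907ecad74a0013c259d5874ae7f22dcbcc95c",
    chain_spec="evm:byzantium:8996:bloxberg",
    rpc_provider="https://rpc.grassecon.net",
    metadata_endpoint="https://auth.grassecon.net",
    wallet_keyfile=None,
    wallet_passphrase=None,
)
```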

View File

@ -3,7 +3,7 @@ import logging
import os import os
# local imports # local imports
from cic.contract.base import Data, data_dir from .base import *
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
@ -14,38 +14,37 @@ class Attachment(Data):
:param path: Path to settings directory :param path: Path to settings directory
:type path: str :type path: str
:param writer: Writer interface receiving the output of the processor :param writer: Writer interface receiving the output of the processor
:type writer: cic.writers.OutputWriter :type writer: cic.output.OutputWriter
""" """
def __init__(self, path='.', writer=None):
def __init__(self, path=".", writer=None, interactive=False):
super(Attachment, self).__init__() super(Attachment, self).__init__()
self.contents = {} self.contents = {}
self.path = path self.path = path
self.writer = writer self.writer = writer
self.attachment_path = os.path.join(self.path, "attachments") self.attachment_path = os.path.join(self.path, 'attachments')
self.start()
if interactive:
input(
f"Please add attachment files to '{os.path.abspath(os.path.join(self.path,'attachments'))}' and then press ENTER to continue"
)
self.load()
def load(self): def load(self):
"""Loads attachment data from settings.""" """Loads attachment data from settings.
"""
for s in os.listdir(self.attachment_path): for s in os.listdir(self.attachment_path):
fp = os.path.realpath(os.path.join(self.attachment_path, s)) fp = os.path.realpath(os.path.join(self.attachment_path, s))
with open(fp, "rb") as f: f = open(fp, 'rb')
r = f.read() r = f.read()
f.close()
z = self.hash(r).hex() z = self.hash(r).hex()
self.contents[z] = fp self.contents[z] = fp
logg.debug(f"loaded attachment file {fp} digest {z}") logg.debug('loaded attachment file {} digest {}'.format(fp, z))
def start(self): def start(self):
"""Initialize attachment settings from template.""" """Initialize attachment settings from template.
"""
super(Attachment, self).start() super(Attachment, self).start()
os.makedirs(self.attachment_path, exist_ok=True) os.makedirs(self.attachment_path)
def get(self, k): def get(self, k):
"""Get a single attachment by the sha256 hash of the content. """Get a single attachment by the sha256 hash of the content.
@ -55,28 +54,33 @@ class Attachment(Data):
""" """
return self.contents[k] return self.contents[k]
def asdict(self): def asdict(self):
"""Output attachment state to dict""" """Output attachment state to dict
"""
return self.contents return self.contents
def process(self, token_address=None, token_symbol=None, writer=None): def process(self, token_address=None, token_symbol=None, writer=None):
"""Serialize and publish attachments. """Serialize and publish attachments.
See cic.processor.Processor.process See cic.processor.Processor.process
""" """
if writer == None: if writer == None:
writer = self.writer writer = self.writer
for key, value in self.contents.items(): for k in self.contents.keys():
fp = os.path.join(self.attachment_path, value) fp = os.path.join(self.attachment_path, self.contents[k])
with open(fp, "rb") as f: f = open(fp, 'rb')
data = f.read() v = f.read()
logg.debug(f"writing attachment {key}") f.close()
writer.write(key, data) logg.debug('writing attachment {}'.format(k))
writer.write(k, v)
def __str__(self): def __str__(self):
s = "" s = ''
for key, value in self.contents.items(): for k in self.contents.keys():
s += f"{key} = {value}\n" # self.digests[i].hex(), self.contents[i]) s += '{} = {}\n'.format(k, self.contents[k]) #self.digests[i].hex(), self.contents[i])
return s return s

74
cic/auth.py Normal file
View File

@ -0,0 +1,74 @@
# standard imports
import hashlib
import logging
import os
# external imports
import gnupg
# local imports
from cic.errors import AuthError
logg = logging.getLogger(__name__)
class PGPAuthCrypt:
typ = "gnupg"
def __init__(self, db_dir, auth_key, pgp_dir=None):
self.db_dir = db_dir
try:
bytes.fromhex(auth_key)
except TypeError:
raise AuthError(f"invalid key {auth_key}") from TypeError
except ValueError:
raise AuthError(f"invalid key {auth_key}") from ValueError
self.auth_key = auth_key
self.gpg = gnupg.GPG(gnupghome=pgp_dir)
self.secret = None
self.__passphrase = None
def get_secret(self, passphrase=""):
if passphrase is None:
passphrase = ""
p = os.path.join(self.db_dir, ".secret")
try:
f = open(p, "rb")
except FileNotFoundError:
h = hashlib.sha256()
h.update(bytes.fromhex(self.auth_key))
h.update(passphrase.encode("utf-8"))
z = h.digest()
secret = self.gpg.encrypt(z, [self.auth_key], always_trust=True)
if not secret.ok:
raise AuthError(f"could not encrypt secret for {self.auth_key}") from FileNotFoundError
d = os.path.dirname(p)
os.makedirs(d, exist_ok=True)
f = open(p, "wb")
f.write(secret.data)
f.close()
f = open(p, "rb")
secret = self.gpg.decrypt_file(f, passphrase=passphrase)
if not secret.ok:
raise AuthError("could not decrypt encryption secret. wrong password?")
f.close()
self.secret = secret.data
self.__passphrase = passphrase
def get_passphrase(self):
return self.__passphrase
def fingerprint(self):
return self.auth_key
def sign(self, plaintext, encoding, passphrase="", detach=True):
r = self.gpg.sign(plaintext, passphrase=passphrase, detach=detach)
if len(r.data) == 0:
raise AuthError("signing failed: " + r.status)
if encoding == "base64":
r = r.data
return r
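
A minimal sketch of driving `PGPAuthCrypt` directly, with hypothetical paths and key fingerprint; `CmdCtrl.auth()` in `cic/cmd/arg.py` below uses it the same way:

```python
from cic.auth import PGPAuthCrypt

# Hypothetical paths and PGP key fingerprint (hex).
auth = PGPAuthCrypt(
    db_dir="/home/user/.local/share/cic/clicada",
    auth_key="eb3907ecad74a0013c259d5874ae7f22dcbcc95c",
    pgp_dir="/home/user/.gnupg",
)
auth.get_secret(passphrase="")                     # creates or decrypts db_dir/.secret
signature = auth.sign("hello", encoding="base64")  # detached signature by default
```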

View File

@ -3,7 +3,7 @@ import os
import hashlib import hashlib
mod_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..') mod_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.join(mod_dir, '..') root_dir = os.path.join(mod_dir, '..')
data_dir = os.path.join(mod_dir, 'data') data_dir = os.path.join(mod_dir, 'data')
schema_dir = os.path.join(mod_dir, 'schema') schema_dir = os.path.join(mod_dir, 'schema')

View File

@ -0,0 +1 @@
from cic.cmd.arg import CmdCtrl

221
cic/cmd/arg.py Normal file
View File

@ -0,0 +1,221 @@
# standard imports
import importlib
import logging
import os
import sys
# external imports
import chainlib.eth.cli
import cic.cmd.easy as cmd_easy
import cic.cmd.export as cmd_export
import cic.cmd.ext as cmd_ext
import cic.cmd.init as cmd_init
import cic.cmd.show as cmd_show
from chainlib.chain import ChainSpec
from cic.auth import PGPAuthCrypt
from cic.crypt.aes import AESCTREncrypt
from cic.http import HTTPSession, PGPClientSession
# local imports
from cic.notify import NotifyWriter
notifier = NotifyWriter()
logg = logging.getLogger(__name__)
script_dir = os.path.dirname(os.path.realpath(__file__))
data_dir = os.path.join(script_dir, "..", "data")
base_config_dir = os.path.join(data_dir, "config")
class NullWriter:
def notify(self, v):
pass
def ouch(self, v):
pass
def write(self, v):
sys.stdout.write(str(v))
class CmdCtrl:
__cmd_alias = {
"u": "user",
"t": "tag",
}
__auth_for = [
"user",
]
def __init__(self, *_args, argv=None, _description=None, logger=None, **_kwargs):
self.args(argv)
self.logging(logger)
self.module()
self.load_config()
self.notifier()
self.auth()
self.blockchain()
self.remote_openers = {}
if self.get("META_URL") is not None:
auth_client_session = PGPClientSession(self.__auth)
self.remote_openers["meta"] = HTTPSession(
self.get("META_URL"),
auth=auth_client_session,
origin=self.config.get("META_HTTP_ORIGIN"),
)
def blockchain(self):
self.chain_spec = ChainSpec.from_chain_str(self.config.get("CHAIN_SPEC"))
self.rpc = chainlib.eth.cli.Rpc()
self.__conn = self.rpc.connect_by_config(self.config)
def args(self, argv):
self.argparser = chainlib.eth.cli.ArgumentParser(
chainlib.eth.cli.argflag_std_read
)
sub = self.argparser.add_subparsers()
sub.dest = "command"
sub_init = sub.add_parser("init", help="initialize new cic data directory")
cmd_init.process_args(sub_init)
sub_show = sub.add_parser(
"show", help="display summary of current state of cic data directory"
)
cmd_show.process_args(sub_show)
sub_export = sub.add_parser(
"export", help="export cic data directory state to a specified target"
)
cmd_export.process_args(sub_export)
sub_ext = sub.add_parser("ext", help="extension helpers")
cmd_ext.process_args(sub_ext)
sub_easy = sub.add_parser("easy", help="Easy Mode Contract Deployment")
cmd_easy.process_args(sub_easy)
self.cmd_args = self.argparser.parse_args(argv)
def module(self):
self.cmd_string = self.cmd_args.command
cmd_string_translate = self.__cmd_alias.get(self.cmd_string)
if cmd_string_translate is not None:
self.cmd_string = cmd_string_translate
if self.cmd_string is None:
self.cmd_string = "none"
self.argparser.print_help()
exit(1)
modname = f"cic.cmd.{self.cmd_string}"
self.logger.debug(f"using module {modname}")
self.cmd_mod = importlib.import_module(modname)
def logging(self, logger):
self.logger = logger
if self.logger is None:
self.logger = logging.getLogger()
if self.cmd_args.vv:
self.logger.setLevel(logging.DEBUG)
elif self.cmd_args.v:
self.logger.setLevel(logging.INFO)
def load_config(self):
override_dir = self.cmd_args.config
if override_dir is None:
p = os.environ.get("HOME")
if p is not None:
p = os.path.join(p, ".config", "cic", "cli")
try:
os.stat(p)
override_dir = p
logg.info(
f"applying user config override from standard location: {p}"
)
except FileNotFoundError:
pass
extra_args = self.cmd_mod.extra_args()
self.config = chainlib.eth.cli.Config.from_args(
self.cmd_args,
base_config_dir=base_config_dir,
extra_args=extra_args,
default_config_dir=override_dir,
)
self.config.add(False, "_SEQ")
self.config.censor("AUTH_PASSPHRASE")
self.logger.debug(f"loaded config:\n{self.config}")
def auth(self):
typ = self.get("AUTH_TYPE")
if typ != "gnupg":
raise NotImplementedError("Valid aut implementations are: gnupg")
default_auth_db_path = None
if os.environ.get("HOME") is not None:
default_auth_db_path = os.path.join(
os.environ["HOME"], ".local/share/cic/clicada"
)
auth_db_path = self.get("AUTH_DB_PATH", default_auth_db_path)
self.__auth = PGPAuthCrypt(
auth_db_path, self.get("AUTH_KEY"), self.get("AUTH_KEYRING_PATH")
)
self.__auth.get_secret(self.get("AUTH_PASSPHRASE"))
self.encrypter = AESCTREncrypt(auth_db_path, self.__auth.secret)
logg.debug(f"loaded auth: {self.__auth}")
logg.debug(f"AUTH_PASSPHRASE: {self.get('AUTH_PASSPHRASE')}")
logg.debug(f"AUTH_KEY: {self.get('AUTH_KEY')}")
logg.debug(f"AUTH_DB_PATH: {self.get('AUTH_DB_PATH')}")
logg.debug(f"AUTH_KEYRING_PATH: {self.get('AUTH_KEYRING_PATH')}")
def get(self, k, default=None):
r = self.config.get(k, default)
if k in [
"_FORCE",
]:
if r is None:
return False
return self.config.true(k)
return r
def chain(self):
return self.chain_spec
def conn(self):
return self.__conn
def execute(self):
self.cmd_mod.execute(self)
def opener(self, k):
return self.remote_openers[k]
def notifier(self):
if logg.root.level >= logging.WARNING:
logging.disable()
self.writer = notifier
else:
self.writer = NullWriter()
def notify(self, v):
self.writer.notify(v)
def ouch(self, v):
self.writer.ouch(v)
print()
def write(self, v):
self.writer.write("")
self.writer.write(v)
print()
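
A sketch of how an entry point would drive `CmdCtrl`; this assumes a populated config directory, a gnupg key, and a reachable RPC, since `__init__` runs `auth()` and `blockchain()` eagerly. The argv values are placeholders:

```python
# Hypothetical sketch; argv values are placeholders.
from cic.cmd.arg import CmdCtrl

ctrl = CmdCtrl(argv=["easy", "./somewhere"])  # parse args, load config, set up auth and RPC
ctrl.execute()                                # dispatches to cic.cmd.easy.execute(ctrl)
```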

412
cic/cmd/easy.py Normal file
View File

@ -0,0 +1,412 @@
from __future__ import annotations
# standard import
import importlib
import json
import logging
import os
from typing import TYPE_CHECKING
import requests
# external imports
from chainlib.chain import ChainSpec
# local imports
from cic import Proof
from cic.actions.deploy import deploy
from cic.attachment import Attachment
from cic.meta import Meta
from cic.network import Network
from cic.token import Token
if TYPE_CHECKING:
from cic.cmd.arg import CmdCtrl
from cic.actions.types import Options, Contract
log = logging.getLogger(__name__)
def process_args(argparser):
argparser.add_argument(
"--skip-gen", action="store_true", default=False, help="Skip Generation"
)
argparser.add_argument(
"--skip-deploy",
action="store_true",
help="Skip Deployment",
)
argparser.add_argument(
"--target",
default="eth",
help="Contract Tech Target (eth)",
)
argparser.add_argument(
"path",
type=str,
help="Path to generate/use contract deployment info",
)
argparser.add_argument(
"-p",
type=str,
help="RPC Provider (http://localhost:8545)",
)
def extra_args():
return {
"path": "_TOKEN_PATH",
"skip_gen": "_TOKEN_SKIP_GEN",
"skip_deploy": "_TOKEN_SKIP_DEPLOY",
"target": "_TOKEN_TARGET",
"p": "RPC_PROVIDER",
}
def validate_args(_args):
pass
CONTRACTS = [
{
"url": "https://gitlab.com/cicnet/eth-erc20/-/raw/master/python/giftable_erc20_token/data/GiftableToken",
"name": "Giftable Token",
},
{
"url": "https://gitlab.com/cicnet/erc20-demurrage-token/-/raw/master/python/erc20_demurrage_token/data/DemurrageTokenSingleNocap",
"name": "Demurrage Token Single No Cap",
},
]
# Download File from Url
def download_file(url: str, directory: str, filename=None) -> (str, bytes):
os.makedirs(directory, exist_ok=True)
filename = filename if filename else url.split("/")[-1]
path = os.path.join(directory, filename)
if not os.path.exists(path):
log.debug(f"Downloading {filename}")
r = requests.get(url, allow_redirects=True)
open(path, "wb").write(r.content)
return path
return path
def get_contract_args(data: list):
for item in data:
if item["type"] == "constructor":
return item["inputs"]
raise Exception("No constructor found in contract")
def print_contract_args(json_path: str):
json_data = json.load(open(json_path, encoding="utf-8"))
print("Contract Args:")
for contract_arg in get_contract_args(json_data):
print(
f"\t{contract_arg.get('name', '<no name>')} - {contract_arg.get('type', '<no type>')}"
)
def select_contract():
print("Contracts:")
print("\t C - Custom (path/url to contract)")
for idx, contract in enumerate(CONTRACTS):
print(f"\t {idx} - {contract['name']}")
val = input("Select contract (C,0,1..): ")
if val.isdigit() and int(val) < len(CONTRACTS):
contract = CONTRACTS[int(val)]
directory = f"./contracts/{contract['name']}"
bin_path = os.path.abspath(download_file(contract["url"] + ".bin", directory))
json_path = download_file(contract["url"] + ".json", directory)
elif val == "C":
possible_bin_location = input("Enter path/url to contract: ")
# possible_bin_location is path
if possible_bin_location[0] == "." or possible_bin_location[0] == "/":
if os.path.exists(possible_bin_location):
bin_path = os.path.abspath(possible_bin_location)
else:
raise Exception(f"File {possible_bin_location} does not exist")
possible_json_path = val.replace(".bin", ".json")
if os.path.exists(possible_json_path):
json_path = possible_json_path
# possible_bin_location is url
else:
bin_path = download_file(possible_bin_location, directory)
else:
print("Invalid selection")
exit(1)
contract_extra_args = []
contract_extra_args_types = []
if os.path.exists(json_path):
json_data = json.load(open(json_path, encoding="utf-8"))
for contract_arg in get_contract_args(json_data):
arg_name = contract_arg.get("name")
arg_type = contract_arg.get("type")
if arg_name not in ["_decimals", "_name", "_symbol"]:
val = input(f"Enter value for {arg_name} ({arg_type}): ")
contract_extra_args.append(val)
if arg_type == "uint128":
contract_extra_args_types.append("uint256")
else:
contract_extra_args_types.append(arg_type)
return {
"bin_path": bin_path,
"json_path": json_path,
"extra_args": contract_extra_args,
"extra_args_types": contract_extra_args_types,
}
def init_token(directory: str, code=""):
contract = select_contract()
code = contract["bin_path"]
contract_extra_args = contract["extra_args"]
contract_extra_args_types = contract["extra_args_types"]
name = input("Enter Token Name (Foo Token): ") or "Foo Token"
symbol = input("Enter Token Symbol (FOO): ") or "FOO"
precision = input("Enter Token Precision (6): ") or 6
supply = input("Enter Token Supply (0): ") or 0
contract_token = Token(
directory,
name=name,
symbol=symbol,
precision=precision,
extra_args=contract_extra_args,
extra_args_types=contract_extra_args_types,
supply=supply,
code=code,
)
contract_token.start()
return contract_token
def init_proof(directory):
description = input("Enter Proof Description (None): ") or None
namespace = input("Enter Proof Namespace (ge): ") or "ge"
issuer = input("Enter Proof Issuer (None): ") or None
contract_proof = Proof(directory, description, namespace, issuer)
contract_proof.start()
return contract_proof
def init_meta(directory):
name = input("Enter Name (None): ") or ""
country_code = input("Enter Country Code (KE): ") or "KE"
location = input("Enter Location (None): ") or ""
adding_contact_info = True
contact = {}
while adding_contact_info:
value = input("Enter contact info (e.g 'phone: +254723522718'): ") or None
if value:
data = value.split(":")
if len(data) != 2:
print("Invalid contact info, you must enter in the format 'key: value'")
continue
contact[data[0].strip()] = data[1].strip()
else:
adding_contact_info = False
contract_meta = Meta(
directory,
name=name,
country_code=country_code,
location=location,
contact=contact,
)
contract_meta.start()
return contract_meta
def init_attachment(directory):
contract_attchment = Attachment(directory)
contract_attchment.start()
input(
f"Please add attachment files to '{os.path.abspath(os.path.join(directory,'attachments'))}' and then press ENTER to continue"
)
contract_attchment.load()
return contract_attchment
def load_contract(directory) -> Contract:
token = Token(path=directory)
proof = Proof(path=directory)
meta = Meta(path=directory)
attachment = Attachment(path=directory)
network = Network(directory)
token.load()
proof.load()
meta.load()
attachment.load()
network.load()
return Contract(
token=token, proof=proof, meta=meta, attachment=attachment, network=network
)
def init_network(
directory,
options: Options,
targets=["eth"],
):
contract_network = Network(directory, targets=targets)
contract_network.start()
for target in targets:
m = importlib.import_module(f"cic.ext.{target}.start")
m.extension_start(
contract_network,
registry_address=options.contract_registry,
chain_spec=options.chain_spec,
rpc_provider=options.rpc_provider,
key_account_address=options.key_account,
)
contract_network.load()
return contract_network
def generate(directory: str, target: str, options: Options) -> Contract:
if os.path.exists(directory):
contine = input(
"Directory already exists, Would you like to delete it? (y/n): "
)
if contine.lower() != "y":
print("Exiting")
exit(1)
else:
print(f"Deleted {directory}")
os.system(f"rm -rf {directory}")
os.makedirs(directory)
token = init_token(directory)
proof = init_proof(directory)
meta = init_meta(directory)
attachment = init_attachment(directory)
network = init_network(
directory,
options,
targets=[target],
)
return Contract(
token=token, proof=proof, meta=meta, attachment=attachment, network=network
)
def get_options(ctrl: CmdCtrl) -> Options:
# Defaults
default_contract_registry = ctrl.config.get(
"CIC_REGISTRY_ADDRESS",
"0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299", # Comes from /home/will/grassroots/cic-staff-installer/var/cic-staff-client/CIC_REGISTRY_ADDRESS
)
default_key_account = ctrl.config.get(
"AUTH_KEY",
"eb3907ecad74a0013c259d5874ae7f22dcbcc95c", # comes from wallet `eth-keyfile -z -d $WALLET_KEY_FILE`
)
# https://meta.grassrootseconomics.net
# https://auth.grassrootseconomics.net Authenticated Meta
default_metadata_endpoint = ctrl.config.get("META_URL", "https://auth.grassecon.net")
# Keyring folder needs to be dumped out as a private key file from $HOME/.config/cic/staff-client/.gnupg
default_wallet_keyfile = ctrl.config.get(
"WALLET_KEY_FILE",
"/home/will/grassroots/cic-internal-integration/apps/cic-ussd/tests/data/pgp/privatekeys_meta.asc",
) # Show possible wallet keys
# Should be an input???
default_wallet_passphrase = ctrl.config.get("WALLET_PASSPHRASE", "merman")
default_chain_spec = ctrl.config.get("CHAIN_SPEC", "evm:byzantium:8996:bloxberg")
default_rpc_provider = ctrl.config.get(
"RPC_PROVIDER", "https://rpc.grassecon.net"
)
contract_registry = (
input(f"Enter Contract Registry ({default_contract_registry}): ")
or default_contract_registry
)
rpc_provider = (
input(f"Enter RPC Provider ({default_rpc_provider}): ") or default_rpc_provider
)
chain_spec = ChainSpec.from_chain_str(
(input(f"Enter ChainSpec ({default_chain_spec}): ") or default_chain_spec)
)
key_account = (
input(f"Enter KeyAccount ({default_key_account}): ") or default_key_account
)
metadata_endpoint = (
input(f"Enter Metadata Endpoint ({default_metadata_endpoint}): ")
or default_metadata_endpoint
)
auth_passphrase = ctrl.config.get(
"AUTH_PASSPHRASE"
)
auth_keyfile_path = ctrl.config.get(
"AUTH_KEYFILE_PATH"
)
auth_db_path = ctrl.config.get("AUTH_DB_PATH")
return Options(
auth_db_path,
auth_keyfile_path,
auth_passphrase,
contract_registry,
key_account,
chain_spec,
rpc_provider,
metadata_endpoint,
default_wallet_keyfile,
default_wallet_passphrase,
)
def print_contract(contract: Contract):
print(f"[cic.header]\nversion = {contract.proof.version()}\n")
print(f"[cic.token]\n{contract.token}")
print(f"[cic.proof]\n{contract.proof}")
print(f"[cic.meta]\n{contract.meta}")
print(f"[cic.attachment]\n{contract.attachment}")
print(f"[cic.network]\n{contract.network}")
def execute(ctrl: CmdCtrl):
directory = ctrl.config.get("_TOKEN_PATH")
target = ctrl.config.get("_TOKEN_TARGET")
skip_gen = ctrl.config.get("_TOKEN_SKIP_GEN")
skip_deploy = ctrl.config.get("_TOKEN_SKIP_DEPLOY")
options = get_options(ctrl)
if not skip_gen:
contract = generate(directory, target, options)
else:
contract = load_contract(directory)
print_contract(contract)
if not skip_deploy:
ready_to_deploy = input("Ready to deploy? (y/n): ")
if ready_to_deploy == "y":
deploy(
ctrl=ctrl,
contract_directory=directory,
options=options,
keystore_directory="/home/will/grassroots/cic-internal-integration/apps/contract-migration/keystore", # Meta Signer meta.ge.net but not auth.ge.net(usumbufu determins if you can even interact with the server) and this ensures data integrity
target=target,
)
print("Deployed")
else:
print("Not deploying")
if __name__ == "__main__":
# execute()
print("Not Implemented")

View File

@ -1,51 +1,39 @@
# standard imports # standard imports
import importlib
import logging import logging
import importlib
import os import os
from typing import Optional
# external imports # external imports
from cic_types.ext.metadata import MetadataRequestsHandler from cic_types.ext.metadata import MetadataRequestsHandler
from cic_types.ext.metadata.signer import Signer as MetadataSigner from cic_types.ext.metadata.signer import Signer as MetadataSigner
# local imports # local imports
from cic.contract.processor import ContractProcessor from cic import (
from cic.contract.components.proof import Proof Proof,
from cic.contract.components.attachment import Attachment Processor,
from cic.contract.components.meta import Meta )
from cic.contract.network import Network from cic.output import (
from cic.contract.components.token import Token HTTPWriter,
from cic.writers import HTTPWriter, KeyedWriterFactory, MetadataWriter KeyedWriterFactory,
)
from cic.meta import (
Meta,
MetadataWriter,
)
from cic.attachment import Attachment
from cic.network import Network
from cic.token import Token
from typing import Optional
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
def process_args(argparser): def process_args(argparser):
argparser.add_argument( argparser.add_argument('-d', '--directory', type=str, dest='directory', default='.', help='directory')
"-d", "--directory", type=str, dest="directory", default=".", help="directory" argparser.add_argument('-o', '--output-directory', type=str, dest='output_directory', help='output directory')
) argparser.add_argument('--metadata-endpoint', dest='metadata_endpoint', type=str, help='metadata endpoint to interact with')
argparser.add_argument( argparser.add_argument('-y', '--signer', type=str, dest='y', help='target-specific signer to use for export')
"-o", argparser.add_argument('-p', type=str, help='RPC endpoint')
"--output-directory", argparser.add_argument('target', type=str, help='target network type')
type=str,
dest="output_directory",
help="output directory",
)
argparser.add_argument(
"--metadata-endpoint",
dest="metadata_endpoint",
type=str,
help="metadata endpoint to interact with",
)
argparser.add_argument(
"-y",
"--signer",
type=str,
dest="y",
help="target-specific signer to use for export",
)
argparser.add_argument("-p", type=str, help="RPC endpoint")
argparser.add_argument("target", type=str, help="target network type")
def validate_args(args): def validate_args(args):
@ -54,32 +42,24 @@ def validate_args(args):
def init_writers_from_config(config): def init_writers_from_config(config):
w = { w = {
"meta": None, 'meta': None,
"attachment": None, 'attachment': None,
"proof": None, 'proof': None,
"ext": None, 'ext': None,
} }
for v in w.keys(): for v in w.keys():
k = "CIC_CORE_{}_WRITER".format(v.upper()) k = 'CIC_CORE_{}_WRITER'.format(v.upper())
(d, c) = config.get(k).rsplit(".", maxsplit=1) (d, c) = config.get(k).rsplit('.', maxsplit=1)
m = importlib.import_module(d) m = importlib.import_module(d)
o = getattr(m, c) o = getattr(m, c)
w[v] = o w[v] = o
return w return w
EArgs = {'target': str, 'directory': str, 'output_directory': str, 'metadata_endpoint': Optional[str], 'y': str}
ExtraArgs = { def execute(config, eargs: EArgs):
"target": str, modname = 'cic.ext.{}'.format(eargs.target)
"directory": str,
"output_directory": str,
"metadata_endpoint": Optional[str],
"y": str,
}
def execute(config, eargs: ExtraArgs):
modname = f"cic.ext.{eargs.target}"
cmd_mod = importlib.import_module(modname) cmd_mod = importlib.import_module(modname)
writers = init_writers_from_config(config) writers = init_writers_from_config(config)
@ -87,26 +67,18 @@ def execute(config, eargs: ExtraArgs):
output_writer_path_meta = eargs.output_directory output_writer_path_meta = eargs.output_directory
if eargs.metadata_endpoint != None: if eargs.metadata_endpoint != None:
MetadataRequestsHandler.base_url = eargs.metadata_endpoint MetadataRequestsHandler.base_url = eargs.metadata_endpoint
MetadataSigner.gpg_path = os.path.join("/tmp") MetadataSigner.gpg_path = os.path.join('/tmp')
MetadataSigner.key_file_path = config.get("AUTH_KEYFILE_PATH") MetadataSigner.key_file_path = '/home/will/grassroots/cic-internal-integration/apps/cic-ussd/tests/data/pgp/privatekeys_meta.asc'
MetadataSigner.gpg_passphrase = config.get("AUTH_PASSPHRASE") MetadataSigner.gpg_passphrase = 'merman'
writers["proof"] = KeyedWriterFactory(MetadataWriter, HTTPWriter).new writers['proof'] = KeyedWriterFactory(MetadataWriter, HTTPWriter).new
writers["attachment"] = KeyedWriterFactory(None, HTTPWriter).new writers['attachment'] = KeyedWriterFactory(None, HTTPWriter).new
writers["meta"] = MetadataWriter writers['meta'] = MetadataWriter
output_writer_path_meta = eargs.metadata_endpoint output_writer_path_meta = eargs.metadata_endpoint
ct = Token(path=eargs.directory) ct = Token(path=eargs.directory)
cm = Meta( cm = Meta(path=eargs.directory, writer=writers['meta'](path=output_writer_path_meta))
path=eargs.directory, writer=writers["meta"](path=output_writer_path_meta) ca = Attachment(path=eargs.directory, writer=writers['attachment'](path=output_writer_path_meta))
) cp = Proof(path=eargs.directory, attachments=ca, writer=writers['proof'](path=output_writer_path_meta))
ca = Attachment(
path=eargs.directory, writer=writers["attachment"](path=output_writer_path_meta)
)
cp = Proof(
path=eargs.directory,
attachments=ca,
writer=writers["proof"](path=output_writer_path_meta),
)
cn = Network(path=eargs.directory) cn = Network(path=eargs.directory)
ca.load() ca.load()
@ -117,29 +89,20 @@ def execute(config, eargs: ExtraArgs):
chain_spec = None chain_spec = None
try: try:
chain_spec = config.get("CHAIN_SPEC") chain_spec = config.get('CHAIN_SPEC')
except KeyError: except KeyError:
chain_spec = cn.chain_spec chain_spec = cn.chain_spec
config.add(chain_spec, "CHAIN_SPEC", exists_ok=True) config.add(chain_spec, 'CHAIN_SPEC', exists_ok=True)
logg.debug(f"CHAIN_SPEC config set to {str(chain_spec)}") logg.debug('CHAIN_SPEC config set to {}'.format(str(chain_spec)))
# signer = cmd_mod.parse_signer(eargs.y) #signer = cmd_mod.parse_signer(eargs.y)
(rpc, signer) = cmd_mod.parse_adapter(config, eargs.y) (rpc, signer) = cmd_mod.parse_adapter(config, eargs.y)
ref = cn.resource(eargs.target) ref = cn.resource(eargs.target)
chain_spec = cn.chain_spec(eargs.target) chain_spec = cn.chain_spec(eargs.target)
logg.debug( logg.debug('found reference {} chain spec {} for target {}'.format(ref['contents'], chain_spec, eargs.target))
f"found reference {ref['contents']} chain spec {chain_spec} for target {eargs.target}" c = getattr(cmd_mod, 'new')(chain_spec, ref['contents'], cp, signer_hint=signer, rpc=rpc, outputs_writer=writers['ext'](path=eargs.output_directory))
)
c = getattr(cmd_mod, "new")(
chain_spec,
ref["contents"],
cp,
signer_hint=signer,
rpc=rpc,
outputs_writer=writers["ext"](path=eargs.output_directory),
)
c.apply_token(ct) c.apply_token(ct)
p = ContractProcessor(proof=cp, attachment=ca, metadata=cm, extensions=[c]) p = Processor(proof=cp, attachment=ca, metadata=cm, extensions=[c])
p.process() p.process()

View File

@ -3,21 +3,16 @@ import importlib
# external imports # external imports
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
# local imports # local imports
from cic.contract.network import Network from cic.network import Network
def process_args(argparser): def process_args(argparser):
argparser.add_argument( argparser.add_argument('--registry', required=True, type=str, help='contract registry address')
"--registry", type=str, help="contract registry address" argparser.add_argument('-d', '--directory', type=str, dest='directory', default='.', help='directory')
) argparser.add_argument('-p', type=str, help='RPC endpoint')
argparser.add_argument( argparser.add_argument('-i', type=str, help='chain spec string')
"-d", "--directory", type=str, dest="directory", default=".", help="directory" argparser.add_argument('target', help='target to initialize')
)
argparser.add_argument("-p", type=str, help="RPC endpoint")
argparser.add_argument("-i", type=str, help="chain spec string")
argparser.add_argument("target", help="target to initialize")
def validate_args(args): def validate_args(args):
@ -28,11 +23,8 @@ def execute(config, eargs):
cn = Network(eargs.directory, targets=eargs.target) cn = Network(eargs.directory, targets=eargs.target)
cn.load() cn.load()
chain_spec = ChainSpec.from_chain_str(eargs.i or config.get("CHAIN_SPEC"))
m = importlib.import_module(f"cic.ext.{eargs.target}.start") chain_spec = ChainSpec.from_chain_str(eargs.i)
m.extension_start( m = importlib.import_module(f'cic.ext.{eargs.target}.start')
cn, m.extension_start(cn, registry_address=eargs.registry, chain_spec=chain_spec, rpc_provider=config.get('RPC_PROVIDER'))
registry_address=eargs.registry or config.get("CIC_REGISTRY_ADDRESS"),
chain_spec=chain_spec,
rpc_provider=config.get("RPC_PROVIDER"),
) # TODO add key account address

View File

@ -3,27 +3,21 @@ import logging
import os import os
# local imports # local imports
from cic.contract.components.proof import Proof from cic import Proof
from cic.contract.components.meta import Meta from cic.meta import Meta
from cic.contract.components.attachment import Attachment from cic.attachment import Attachment
from cic.contract.network import Network from cic.network import Network
from cic.contract.components.token import Token from cic.token import Token
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
def process_args(argparser): def process_args(argparser):
argparser.add_argument( argparser.add_argument('--target', action='append', type=str, default=[], help='initialize network specification file with target')
"--target", argparser.add_argument('--name', type=str, help='token name')
action="append", argparser.add_argument('--symbol', type=str, help='token symbol')
type=str, argparser.add_argument('--precision', type=str, help='token unit precision')
default=[], argparser.add_argument('directory', help='directory to initialize')
help="initialize network specification file with target",
)
argparser.add_argument("--name", type=str, help="token name")
argparser.add_argument("--symbol", type=str, help="token symbol")
argparser.add_argument("--precision", type=str, help="token unit precision")
argparser.add_argument("directory", help="directory to initialize")
def validate_args(args): def validate_args(args):
@ -31,13 +25,11 @@ def validate_args(args):
def execute(config, eargs): def execute(config, eargs):
logg.info("initializing in {}".format(eargs.directory)) logg.info('initializing in {}'.format(eargs.directory))
os.makedirs(eargs.directory) os.makedirs(eargs.directory)
ct = Token( ct = Token(eargs.directory, name=eargs.name, symbol=eargs.symbol, precision=eargs.precision)
eargs.directory, name=eargs.name, symbol=eargs.symbol, precision=eargs.precision
)
cp = Proof(eargs.directory) cp = Proof(eargs.directory)
cm = Meta(eargs.directory) cm = Meta(eargs.directory)
ca = Attachment(eargs.directory) ca = Attachment(eargs.directory)

View File

@ -1,21 +1,14 @@
# local imports # local imports
from cic.contract.components.proof import Proof from cic import Proof
from cic.contract.components.meta import Meta from cic.meta import Meta
from cic.contract.components.attachment import Attachment from cic.attachment import Attachment
from cic.contract.network import Network from cic.network import Network
from cic.contract.components.token import Token from cic.token import Token
def process_args(argparser): def process_args(argparser):
argparser.add_argument("-f", "--file", type=str, help="add file") argparser.add_argument('-f', '--file', type=str, help='add file')
argparser.add_argument( argparser.add_argument('-d', '--directory', type=str, dest='directory', default='.', help='cic data directory')
"-d",
"--directory",
type=str,
dest="directory",
default=".",
help="cic data directory",
)
def validate_args(args): def validate_args(args):
@ -35,12 +28,8 @@ def execute(config, eargs):
ca.load() ca.load()
cn.load() cn.load()
print( print("""[cic.header]
"""[cic.header] version = {}\n""".format(cp.version()))
version = {}\n""".format(
cp.version()
)
)
print("[cic.token]\n{}".format(ct)) print("[cic.token]\n{}".format(ct))
print("[cic.proof]\n{}".format(cp)) print("[cic.proof]\n{}".format(cp))
print("[cic.meta]\n{}".format(cm)) print("[cic.meta]\n{}".format(cm))

View File

@ -1,102 +0,0 @@
from __future__ import annotations
# standard import
import logging
import os
from chainlib.cli.config import Config
# local imports
from cic.contract.contract import deploy_contract, generate_contract, load_contract
from cic.contract.csv import load_contract_from_csv
log = logging.getLogger(__name__)
def process_args(argparser):
argparser.add_argument(
"--skip-gen", action="store_true", default=False, help="Skip Generation"
)
argparser.add_argument(
"--skip-deploy",
action="store_true",
help="Skip Deployment",
)
argparser.add_argument(
"--csv",
help="Load Voucher from CSV",
)
argparser.add_argument(
"--target",
default="eth",
help="Contract Target (eth)",
)
argparser.add_argument(
"path",
type=str,
help="Path to generate/use contract deployment info",
)
argparser.add_argument(
"-p",
type=str,
help="RPC Provider (http://localhost:8545)",
)
argparser.add_argument(
"-y",
type=str,
help="Wallet Keystore",
)
def validate_args(_args):
pass
def execute(
config: Config,
eargs,
):
directory = eargs.path
target = eargs.target
skip_gen = eargs.skip_gen
skip_deploy = eargs.skip_deploy
wallet_keystore = eargs.y
csv_file = eargs.csv
if wallet_keystore:
config.add(wallet_keystore, "WALLET_KEY_FILE", exists_ok=True)
if skip_gen:
contract = load_contract(directory)
else:
if os.path.exists(directory):
raise Exception(f"Directory {directory} already exists")
if csv_file:
print(f"Generating from csv:{csv_file} to {directory}")
contract = load_contract_from_csv(config, directory, csv_file)
else:
print("Using Interactive Mode")
contract = generate_contract(directory, [target], config, interactive=True)
print(contract)
print(f"Meta: {config.get('META_URL')}")
print(f"ChainSpec: {config.get('CHAIN_SPEC', contract.network.chain_spec(target))}")
print(f"RPC: {config.get('RPC_PROVIDER')}\n")
if not skip_deploy:
ready_to_deploy = input("Are you ready to Deploy? (y/n): ")
if ready_to_deploy == "y":
deploy_contract(
config=config,
contract_directory=directory,
target=target,
)
print("Deployed")
else:
print("Skipping deployment")
if __name__ == "__main__":
# execute()
print("Not Implemented")

View File

@ -1,15 +0,0 @@
import os
import shutil
default_module_configs = os.path.join(os.path.dirname(os.path.realpath(__file__)), '.', 'configs')
def ensure_base_configs(config_dir: str):
"""
Ensure that the base configs are present.
"""
if not os.path.exists(config_dir):
os.makedirs(config_dir)
for f in os.listdir(default_module_configs):
if not os.path.exists(os.path.join(config_dir, f)):
shutil.copytree(os.path.join(default_module_configs, f), os.path.join(config_dir, f))

View File

@ -1,27 +0,0 @@
[cic_core]
meta_writer = cic.writers.KVWriter
attachment_writer = cic.writers.KVWriter
proof_writer = cic.writers.KVWriter
ext_writer = cic.writers.KVWriter
[cic]
registry_address = 0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299
[meta]
url = http://localhost:63380
http_origin =
auth_token =
[rpc]
provider = http://localhost:63545
[auth]
type = gnupg
keyfile_path = /home/will/grassroots/cic-internal-integration/apps/cic-ussd/tests/data/pgp/privatekeys_meta.asc
passphrase = merman
[wallet]
key_file = /home/will/.config/cic/keystore
passphrase =
[chain]
spec = evm:byzantium:8996:bloxberg

View File

@ -1,28 +0,0 @@
[cic_core]
meta_writer = cic.writers.KVWriter
attachment_writer = cic.writers.KVWriter
proof_writer = cic.writers.KVWriter
ext_writer = cic.writers.KVWriter
[cic]
registry_address = 0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299
[meta]
url = http://localhost:8000
http_origin =
auth_token =
[rpc]
provider = http://localhost:8545
[auth]
type = gnupg
keyfile_path = /home/will/grassroots/cic-internal-integration/apps/cic-ussd/tests/data/pgp/privatekeys_meta.asc
passphrase = merman
[wallet]
key_file = /home/will/grassroots/cic-internal-integration/apps/contract-migration/keystore
passphrase =
[chain]
spec = evm:byzantium:8996:bloxberg

View File

@ -1,28 +0,0 @@
[cic_core]
meta_writer = cic.writers.KVWriter
attachment_writer = cic.writers.KVWriter
proof_writer = cic.writers.KVWriter
ext_writer = cic.writers.KVWriter
[cic]
registry_address = 0xe3e3431BF25b06166513019Ed7B21598D27d05dC
[meta]
url = https://meta.sarafu.network
http_origin =
auth_token =
[rpc]
provider = https://rpc.sarafu.network
[auth]
type = gnupg
keyfile_path =
passphrase =
[wallet]
key_file =
passphrase =
[chain]
spec = evm:kitabu:6060:sarafu

View File

@ -1,27 +0,0 @@
[cic_core]
meta_writer = cic.writers.KVWriter
attachment_writer = cic.writers.KVWriter
proof_writer = cic.writers.KVWriter
ext_writer = cic.writers.KVWriter
[cic]
registry_address = 0x47269C43e4aCcA5CFd09CB4778553B2F69963303
[meta]
url = https://meta.sarafu.network
http_origin =
auth_token =
[rpc]
provider = https://rpc.sarafu.network
[auth]
type = gnupg
keyfile_path =
passphrase =
[wallet]
key_file =
passphrase =
[chain]
spec = evm:kitabu:6060:sarafu

View File

@ -1,13 +0,0 @@
GITABLE_CONTRACT_URL = "https://gitlab.com/cicnet/eth-erc20/-/raw/master/python/giftable_erc20_token/data/GiftableToken"
DMR_CONTRACT_URL = "https://gitlab.com/cicnet/erc20-demurrage-token/-/raw/v0.1.1/python/erc20_demurrage_token/data/DemurrageTokenSingleNocap"
CONTRACT_URLS = [
{
"url": GITABLE_CONTRACT_URL,
"name": "Giftable Token",
},
{
"url": DMR_CONTRACT_URL,
"name": "Demurrage Token Single No Cap",
},
]

View File

@ -1,213 +0,0 @@
# Standard
import importlib
import logging
import os
from typing import List
# External imports
from chainlib.chain import ChainSpec
from chainlib.cli.config import Config
# Local Modules
from cic.contract.components.attachment import Attachment
from cic.contract.components.meta import Meta
from cic.contract.components.proof import Proof
from cic.contract.components.token import Token
from cic.contract.helpers import init_writers_from_config
from cic.contract.network import Network
from cic.contract.processor import ContractProcessor
from cic.writers import HTTPWriter, KeyedWriterFactory, MetadataWriter
from cic_types.ext.metadata import MetadataRequestsHandler
from cic_types.ext.metadata.signer import Signer as MetadataSigner
log = logging.getLogger(__name__)
class Contract:
""" """
def __init__(
self,
token: Token,
proof: Proof,
meta: Meta,
attachment: Attachment,
network: Network,
):
self.token = token
self.proof = proof
self.meta = meta
self.attachment = attachment
self.network = network
def __str__(self):
s = ""
s += f"\n[cic.header]\nversion = {self.proof.version()}\n\n"
s += f"[cic.token]\n{self.token}\n"
s += f"[cic.proof]\n{self.proof}\n"
s += f"[cic.meta]\n{self.meta}\n"
s += f"[cic.attachment]\n{self.attachment}\n"
s += f"[cic.network]\n{self.network}\n"
return s
def load_contract(directory) -> Contract:
token = Token(path=directory)
proof = Proof(path=directory)
meta = Meta(path=directory)
attachment = Attachment(path=directory)
network = Network(directory)
token.load()
proof.load()
meta.load()
attachment.load()
network.load()
return Contract(
token=token, proof=proof, meta=meta, attachment=attachment, network=network
)
def generate_contract(
directory: str, targets: List[str], config, interactive=True
) -> Contract:
os.makedirs(directory)
log.info("Generating token")
token = Token(directory, interactive=interactive)
token.start()
log.info("Generating proof")
proof = Proof(directory, interactive=interactive)
proof.start()
log.info("Generating meta")
meta = Meta(directory, interactive=interactive)
meta.start()
log.info("Generating attachment")
attachment = Attachment(directory, interactive=interactive)
log.info("Generating network")
network = Network(directory, targets=targets)
network.start()
log.info(
f"""Populating infomation from network:
CIC_REGISTRY_ADDRESS: {config.get("CIC_REGISTRY_ADDRESS")}
CHAIN_SPEC: {config.get("CHAIN_SPEC")}
RPC_PROVIDER: {config.get("RPC_PROVIDER")}
"""
)
for target in targets:
# TODO Clean this up
modname = f"cic.ext.{target}"
cmd_mod = importlib.import_module(modname)
signer_hint = config.get("WALLET_KEY_FILE")
keys = cmd_mod.list_keys(config, signer_hint)
if len(keys) > 1:
print("More than one key found, please select one:")
for idx, key in enumerate(keys):
print(f"{idx} - {key} ")
selecting_key = True
while selecting_key:
idx = int(input("Select key: "))
if keys[idx] is not None:
key_account_address = keys[idx]
selecting_key = False
else:
print("Invalid key, try again")
else:
key_account_address = keys[0]
m = importlib.import_module(f"cic.ext.{target}.start")
m.extension_start(
network,
registry_address=config.get("CIC_REGISTRY_ADDRESS"),
chain_spec=ChainSpec.from_chain_str(config.get("CHAIN_SPEC")),
rpc_provider=config.get("RPC_PROVIDER"),
key_account_address=key_account_address,
)
network.load()
return Contract(
token=token, proof=proof, meta=meta, attachment=attachment, network=network
)
def deploy_contract(
config: Config,
target: str,
contract_directory: str,
):
modname = f"cic.ext.{target}"
cmd_mod = importlib.import_module(modname)
writers = init_writers_from_config(config)
output_directory = os.path.join(contract_directory, "out")
output_writer_path_meta = output_directory
metadata_endpoint = config.get("META_URL")
metadata_auth_token = config.get("META_AUTH_TOKEN")
headers = {"Authorization": f"Basic {metadata_auth_token}"}
if metadata_endpoint is not None:
MetadataRequestsHandler.base_url = metadata_endpoint
MetadataRequestsHandler.auth_token = metadata_auth_token
MetadataSigner.gpg_path = "/tmp"
MetadataSigner.key_file_path = config.get("AUTH_KEYFILE_PATH")
MetadataSigner.gpg_passphrase = config.get("AUTH_PASSPHRASE")
writers["proof"] = KeyedWriterFactory(MetadataWriter, HTTPWriter).new
writers["attachment"] = KeyedWriterFactory(None, HTTPWriter).new
writers["meta"] = MetadataWriter
output_writer_path_meta = metadata_endpoint
ct = Token(path=contract_directory)
cm = Meta(
path=contract_directory, writer=writers["meta"](path=output_writer_path_meta)
)
ca = Attachment(
path=contract_directory,
writer=writers["attachment"](path=output_writer_path_meta, headers=headers),
)
cp = Proof(
path=contract_directory,
attachments=ca,
writer=writers["proof"](path=output_writer_path_meta, headers=headers),
)
cn = Network(path=contract_directory)
ca.load()
ct.load()
cp.load()
cm.load()
cn.load()
chain_spec = None
try:
chain_spec = config.get("CHAIN_SPEC")
log.debug(f"using CHAIN_SPEC from config: {chain_spec}")
except KeyError:
chain_spec = cn.chain_spec
config.add(chain_spec, "CHAIN_SPEC", exists_ok=True)
log.debug(f"using CHAIN_SPEC: {str(chain_spec)} from network")
signer_hint = config.get("WALLET_KEY_FILE")
(rpc, signer) = cmd_mod.parse_adapter(config, signer_hint)
target_network_reference = cn.resource(target)
chain_spec = cn.chain_spec(target)
log.debug(
f'found reference {target_network_reference["contents"]} chain spec {chain_spec} for target {target}'
)
c = getattr(cmd_mod, "new")(
chain_spec,
target_network_reference["contents"],
cp,
signer_hint=signer,
rpc=rpc,
outputs_writer=writers["ext"](path=output_directory),
)
c.apply_token(ct)
p = ContractProcessor(proof=cp, attachment=ca, metadata=cm, extensions=[c])
p.process()
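A previously generated deployment directory can be reloaded and inspected with the loader defined above; a short sketch (the path is a placeholder):

    from cic.contract.contract import load_contract

    contract = load_contract("./my-voucher")
    print(contract)  # renders the [cic.header], [cic.token], [cic.proof], ... sections built in __str__ above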

View File

@ -1,212 +0,0 @@
import csv
import importlib
import logging
import os
from enum import IntEnum
from pathlib import Path
from typing import List
from chainlib.chain import ChainSpec
from cic.contract.components.attachment import Attachment
from cic.contract.components.meta import Meta
from cic.contract.components.proof import Proof
from cic.contract.components.token import Token
from cic.contract.constants import DMR_CONTRACT_URL, GITABLE_CONTRACT_URL
from cic.contract.contract import Contract
from cic.contract.helpers import download_file
from cic.contract.network import Network
log = logging.getLogger(__name__)
CONTRACT_CSV_HEADER = [
"issuer",
"namespace",
"voucher_name",
"symbol",
"location",
"country_code",
"supply",
"precision",
"token_type",
"demurrage",
"period_minutes",
"phone_number",
"email_address",
"sink_account",
"description",
]
class CSV_Column(IntEnum):
issuer = 0
namespace = 1
voucher_name = 2
symbol = 3
location = 4
country_code = 5
supply = 6
precision = 7
token_type = 8
demurrage = 9
period_minutes = 10
phone_number = 11
email_address = 12
sink_account = 13
description = 14
def load_contracts_from_csv(config, directory, csv_path: str) -> List[Contract]:
targets = ["eth"]
os.makedirs(directory)
contract_rows = []
with open(csv_path, "rt", encoding="utf-8") as file:
csvreader = csv.reader(file, delimiter=",")
for idx, row in enumerate(csvreader):
if idx == 0:
if row != CONTRACT_CSV_HEADER:
raise Exception(
f'Seems you are using the wrong csv format. Expected the header to be: \n\t {", ".join(CONTRACT_CSV_HEADER)}'
)
continue
contract_rows.append(row)
contracts = []
for idx, contract_row in enumerate(contract_rows):
issuer = contract_row[CSV_Column.issuer]
namespace = contract_row[CSV_Column.namespace]
voucher_name = contract_row[CSV_Column.voucher_name]
symbol = contract_row[CSV_Column.symbol]
location = contract_row[CSV_Column.location]
country_code = contract_row[CSV_Column.country_code]
supply = contract_row[CSV_Column.supply]
precision = contract_row[CSV_Column.precision]
token_type = contract_row[CSV_Column.token_type]
demurrage = contract_row[CSV_Column.demurrage]
period_minutes = contract_row[CSV_Column.period_minutes]
phone_number = contract_row[CSV_Column.phone_number]
email_address = contract_row[CSV_Column.email_address]
sink_account = contract_row[CSV_Column.sink_account]
description = contract_row[CSV_Column.description]
if token_type == "demurrage":
bin_path = os.path.abspath(download_file(DMR_CONTRACT_URL + ".bin"))
log.info(f"Generating {token_type} contract for {issuer}")
token = Token(
directory,
name=voucher_name,
symbol=symbol,
precision=precision,
supply=supply,
extra_args=[demurrage, period_minutes, sink_account],
extra_args_types=["uint256", "uint256", "address"],
code=bin_path,
)
elif token_type == "giftable":
bin_path = os.path.abspath(download_file(GITABLE_CONTRACT_URL + ".bin"))
token = Token(
directory,
name=voucher_name,
symbol=symbol,
precision=precision,
supply=supply,
extra_args=[],
extra_args_types=[],
code=bin_path,
)
else:
raise Exception(
f"Only demurrage and gitable contracts currently supported at this time. {token_type} is not supported"
)
if token is None:
raise Exception(f"There was an issue building the contract")
token.start()
log.info("Generating proof")
proof = Proof(
directory,
attachments=None,
issuer=issuer,
description=description,
namespace=namespace,
)
proof.start()
log.info("Generating meta")
meta = Meta(
directory,
name=issuer,
contact={
"phone": phone_number,
"email": email_address,
},
country_code=country_code,
location=location,
)
meta.start()
log.info("Generating attachment")
attachment = Attachment(directory)
log.info("Generating network")
network = Network(directory, targets=targets)
network.start()
log.info(
f"""Populating infomation from network:
CIC_REGISTRY_ADDRESS: {config.get("CIC_REGISTRY_ADDRESS")}
CHAIN_SPEC: {config.get("CHAIN_SPEC")}
RPC_PROVIDER: {config.get("RPC_PROVIDER")}
"""
)
for target in targets:
# TODO Clean this up
modname = f"cic.ext.{target}"
cmd_mod = importlib.import_module(modname)
signer_hint = config.get("WALLET_KEY_FILE")
if signer_hint is None:
raise Exception("No Wallet Keyfile was provided")
keys = cmd_mod.list_keys(config, signer_hint)
if keys is None or len(keys) == 0:
raise Exception(f"No wallet keys found in {signer_hint}")
if len(keys) > 1:
log.warning(
f"More than one key found in the keystore. Using the first one\n - {keys[0]}"
)
key_account_address = keys[0]
m = importlib.import_module(f"cic.ext.{target}.start")
m.extension_start(
network,
registry_address=config.get("CIC_REGISTRY_ADDRESS"),
chain_spec=ChainSpec.from_chain_str(config.get("CHAIN_SPEC")),
rpc_provider=config.get("RPC_PROVIDER"),
key_account_address=key_account_address,
)
network.load()
contracts.append(
Contract(
token=token,
proof=proof,
meta=meta,
attachment=attachment,
network=network,
)
)
return contracts
def load_contract_from_csv(config, directory, csv_path: str) -> Contract:
path = Path(csv_path)
if path.is_file():
contracts = load_contracts_from_csv(config, directory, csv_path=csv_path)
if len(contracts) == 0:
raise Exception("No contracts found in CSV")
if len(contracts) > 1:
log.warning(
"Warning multiple contracts found in CSV. Only the first contract will be used"
)
else:
raise Exception("CSV file does not exist")
return contracts[0]
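For reference, a CSV accepted by the header check above would look like the following; every value is illustrative only:

    issuer,namespace,voucher_name,symbol,location,country_code,supply,precision,token_type,demurrage,period_minutes,phone_number,email_address,sink_account,description
    Acme Collective,acme,Acme Voucher,ACME,Nairobi,KE,1000000,6,demurrage,2,43200,+254700000000,acme@example.com,0x0000000000000000000000000000000000000000,Example demurrage voucher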

View File

@ -1,121 +0,0 @@
# standard imports
import hashlib
import importlib
import json
import logging
import os
import sys
import tempfile
from typing import Callable, TypedDict, Union
import requests
from cic.contract.constants import CONTRACT_URLS
# local imports
from cic.writers import WritersType
log = logging.getLogger(__name__)
# Download File from Url
def download_file(url: str, filename=None) -> str:
directory = tempfile.gettempdir()
filename = filename if filename else url.split("/")[-1]
log.debug(f"Downloading {filename}")
r = requests.get(url, allow_redirects=True)
content_hash = hashlib.md5(r.content).hexdigest()
path = os.path.join(directory, content_hash)
with open(path, "wb") as f:
f.write(r.content)
log.debug(f"{filename} downloaded to {path}")
return path
def get_contract_args(data: list):
for item in data:
if item["type"] == "constructor":
return item["inputs"]
raise Exception("No constructor found in contract")
def select_contract():
print("Contracts:")
print("\t C - Custom (path/url to contract)")
for idx, contract in enumerate(CONTRACT_URLS):
print(f"\t {idx} - {contract['name']}")
val = input("Select contract (C,0,1..): ")
if val.isdigit() and int(val) < len(CONTRACT_URLS):
contract = CONTRACT_URLS[int(val)]
bin_path = os.path.abspath(download_file(contract["url"] + ".bin"))
json_path = download_file(contract["url"] + ".json")
elif val == "C":
possible_bin_location = input("Enter a path or url to a contract.bin: ")
if possible_bin_location.startswith("http"):
# possible_bin_location is url
bin_path = download_file(possible_bin_location)
else:
# possible_bin_location is path
if os.path.exists(possible_bin_location):
bin_path = os.path.abspath(possible_bin_location)
else:
raise Exception(f"File {possible_bin_location} does not exist")
possible_json_path = val.replace(".bin", ".json")
if os.path.exists(possible_json_path):
json_path = possible_json_path
else:
print("Invalid selection")
sys.exit(1)
contract_extra_args = []
contract_extra_args_types = []
if os.path.exists(json_path):
with open(json_path, encoding="utf-8") as f:
json_data = json.load(f)
for contract_arg in get_contract_args(json_data):
arg_name = contract_arg.get("name")
arg_type = contract_arg.get("type")
if arg_name not in ["_decimals", "_name", "_symbol"]:
val = input(f"Enter value for {arg_name} ({arg_type}): ")
contract_extra_args.append(val)
if arg_type == "uint128":
contract_extra_args_types.append("uint256")
else:
contract_extra_args_types.append(arg_type)
return {
"bin_path": bin_path,
"json_path": json_path,
"extra_args": contract_extra_args,
"extra_args_types": contract_extra_args_types,
}
class Writers(TypedDict):
meta: Union[WritersType, Callable[..., WritersType]]
attachment: Callable[..., WritersType]
proof: Callable[..., WritersType]
ext: Union[WritersType, Callable[..., WritersType]]
def init_writers_from_config(config) -> Writers:
writers = {}
writer_keys = ["meta", "attachment", "proof", "ext"]
for key in writer_keys:
writer_config_name = f"CIC_CORE_{key.upper()}_WRITER"
(module_name, attribute_name) = config.get(writer_config_name).rsplit(
".", maxsplit=1
)
mod = importlib.import_module(module_name)
writer = getattr(mod, attribute_name)
writers[key] = writer
return Writers(
meta=writers["meta"],
attachment=writers["attachment"],
proof=writers["proof"],
ext=writers["ext"],
)
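As an illustration of the dotted-path resolution in init_writers_from_config above, a config value such as cic.writers.KVWriter is split on its last dot and imported; a minimal sketch, assuming the cic package is importable:

    import importlib

    value = "cic.writers.KVWriter"  # e.g. the CIC_CORE_META_WRITER config value
    module_name, attribute_name = value.rsplit(".", maxsplit=1)
    writer = getattr(importlib.import_module(module_name), attribute_name)
    # writer is now the KVWriter class, ready to be instantiated with an output path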

42
cic/crypt/aes.py Normal file
View File

@ -0,0 +1,42 @@
# standard imports
import os
import logging
import hashlib
from Crypto.Cipher import AES
from Crypto.Util import Counter
from .base import Encrypter
logg = logging.getLogger(__name__)
class AESCTREncrypt(Encrypter):
aes_block_size = 1 << 7
counter_bytes = int(128 / 8)
def __init__(self, db_dir, secret):
self.secret = secret
def key_to_iv(self, k):
h = hashlib.sha256()
h.update(k.encode('utf-8'))
h.update(self.secret)
z = h.digest()
return int.from_bytes(z[:self.counter_bytes], 'big')
def encrypt(self, k, v):
iv = self.key_to_iv(k)
ctr = Counter.new(self.aes_block_size, initial_value=iv)
cipher = AES.new(self.secret, AES.MODE_CTR, counter=ctr)
return cipher.encrypt(v)
def decrypt(self, k, v):
iv = self.key_to_iv(k)
ctr = Counter.new(self.aes_block_size, initial_value=iv)
cipher = AES.new(self.secret, AES.MODE_CTR, counter=ctr)
return cipher.decrypt(v)
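A brief usage sketch of the encrypter above; the secret and key label are illustrative, and the secret must be a valid AES key length (16, 24 or 32 bytes):

    from cic.crypt.aes import AESCTREncrypt

    secret = bytes(32)  # placeholder AES-256 key; derive or load a real one in practice
    enc = AESCTREncrypt(db_dir=None, secret=secret)  # db_dir is currently unused by the class
    ciphertext = enc.encrypt("user:alice", b"hello")
    assert enc.decrypt("user:alice", ciphertext) == b"hello"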

8
cic/crypt/base.py Normal file
View File

@ -0,0 +1,8 @@
class Encrypter:
def encrypt(self, v):
raise NotImplementedError()
def decrypt(self, v):
raise NotImplementedError()

View File

@ -1,18 +1,23 @@
[cic_core] [cic_core]
meta_writer = cic.writers.KVWriter meta_writer = cic.output.KVWriter
attachment_writer = cic.writers.KVWriter attachment_writer = cic.output.KVWriter
proof_writer = cic.writers.KVWriter proof_writer = cic.output.KVWriter
ext_writer = cic.writers.KVWriter ext_writer = cic.output.KVWriter
[cic] [cic]
registry_address = 0xe3e3431BF25b06166513019Ed7B21598D27d05dC registry_address = 0xcf60ebc445b636a5ab787f9e8bc465a2a3ef8299
[meta] [meta]
url = https://meta.sarafu.network url = https://auth.grassecon.net
http_origin = http_origin =
auth_token =
[auth] [auth]
type = gnupg type = gnupg
keyfile_path = db_path = /home/will/.local/share/cic/clicada
passphrase = keyfile_path = /home/will/.config/cic/staff-client/user.asc
keyring_path = /home/will/.config/cic/staff-client/.gnupg
key = CCE2E1D2D0E36ADE0405E2D0995BB21816313BD5
passphrase =

View File

@ -2,5 +2,6 @@
"name": "", "name": "",
"location": "", "location": "",
"country_code": "", "country_code": "",
"contact": {} "contact": {
}
} }

6
cic/errors.py Normal file
View File

@ -0,0 +1,6 @@
class AuthError(Exception):
pass
class MetadataNotFoundError(Exception):
pass

View File

@ -1,41 +1,43 @@
# standard imports # standard imports
import logging
import copy import copy
import json import json
import logging
# external imports # external imports
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.address import is_address, to_checksum_address from chainlib.eth.tx import (
TxFormat,
TxFactory,
Tx,
receipt,
)
from chainlib.eth.connection import RPCConnection from chainlib.eth.connection import RPCConnection
from chainlib.eth.contract import ABIContractEncoder, ABIContractType from chainlib.eth.contract import (
ABIContractEncoder,
ABIContractType
)
from chainlib.eth.gas import OverrideGasOracle from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import Tx, TxFactory, TxFormat, receipt from chainlib.eth.address import (
is_address,
to_checksum_address,
)
from hexathon import add_0x
from eth_token_index import TokenUniqueSymbolIndex
from eth_address_declarator import Declarator from eth_address_declarator import Declarator
from eth_address_declarator.declarator import AddressDeclarator from eth_address_declarator.declarator import AddressDeclarator
from eth_token_index import TokenUniqueSymbolIndex
from giftable_erc20_token import GiftableToken from giftable_erc20_token import GiftableToken
from hexathon import add_0x, strip_0x
# local imports # local imports
from cic.ext.eth.rpc import parse_adapter, list_keys from cic.ext.eth.rpc import parse_adapter
from cic.extension import Extension from cic.extension import Extension
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
class CICEth(Extension): class CICEth(Extension):
def __init__(
self, def __init__(self, chain_spec, resources, proof, signer=None, rpc=None, outputs_writer=None, fee_oracle=None):
chain_spec,
resources,
proof,
signer=None,
rpc=None,
outputs_writer=None,
fee_oracle=None,
):
"""Implementation for the eth extension. """Implementation for the eth extension.
@ -52,25 +54,19 @@ class CICEth(Extension):
:param rpc: RPC adapter capable of submitting and querying the chain network node :param rpc: RPC adapter capable of submitting and querying the chain network node
:type rpc: chainlib.connection.RPCConnection :type rpc: chainlib.connection.RPCConnection
:param outputs_writer: Writer interface receiving the output of the processor :param outputs_writer: Writer interface receiving the output of the processor
:type outputs_writer: cic.writers.OutputWriter :type outputs_writer: cic.output.OutputWriter
:param fee_oracle: Fee oracle required by signer :param fee_oracle: Fee oracle required by signer
:type fee_oracle: chainlib.fee.FeeOracle :type fee_oracle: chainlib.fee.FeeOracle
""" """
super(CICEth, self).__init__( super(CICEth, self).__init__(chain_spec, resources, proof, signer=signer, rpc=rpc, outputs_writer=outputs_writer)
chain_spec,
resources,
proof,
signer=signer,
rpc=rpc,
outputs_writer=outputs_writer,
)
self.fee_oracle = fee_oracle self.fee_oracle = fee_oracle
self.tx_format = TxFormat.RAW_ARGS self.tx_format = TxFormat.RAW_ARGS
if self.rpc is not None: if self.rpc != None:
self.tx_format = TxFormat.JSONRPC self.tx_format = TxFormat.JSONRPC
elif self.signer is not None: elif self.signer != None:
self.tx_format = TxFormat.RLP_SIGNED self.tx_format = TxFormat.RLP_SIGNED
def __detect_arg_type(self, v): def __detect_arg_type(self, v):
typ = None typ = None
try: try:
@ -78,60 +74,60 @@ class CICEth(Extension):
typ = ABIContractType.UINT256 typ = ABIContractType.UINT256
except TypeError: except TypeError:
pass pass
if typ is None: if typ == None:
try: try:
vv = strip_0x(v) vv = strip_0x(v)
if is_address(vv): if is_address(vv):
typ = ABIContractType.ADDRESS typ = ABIContractType.ADDRESS
else: else:
typ = ABIContractType.BYTES32 typ = ABIContractType.BYTES32
except ValueError: except ValueError:
pass pass
if typ is None: if typ == None:
try: try:
v.encode("utf-8") v.encode('utf-8')
typ = ABIContractType.STRING typ = ABIContractType.STRING
except ValueError: except ValueError:
pass pass
if typ is None: if typ == None:
raise ValueError( raise ValueError('cannot automatically determine type for value {}'.format(v))
f"cannot automatically determine type for value {v}"
)
logg.info(f"argument {v} parsed as abi contract type {typ.value}") logg.info('argument {} parsed as abi contract type {}'.format(typ.value))
return typ return typ
def __order_args(self): def __order_args(self):
args = [ args = [
self.token_details["name"], self.token_details['name'],
self.token_details["symbol"], self.token_details['symbol'],
self.token_details["precision"], self.token_details['precision'],
] ]
args_types = [ args_types = [
ABIContractType.STRING.value, ABIContractType.STRING.value,
ABIContractType.STRING.value, ABIContractType.STRING.value,
ABIContractType.UINT256.value, ABIContractType.UINT256.value,
] ]
for i, x in enumerate(self.token_details["extra"]): for i, x in enumerate(self.token_details['extra']):
args.append(x) args.append(x)
typ = None typ = None
if self.token_details["extra_types"] is not None: if self.token_details['extra_types'] != None:
typ = self.token_details["extra_types"][i] typ = self.token_details['extra_types'][i]
else: else:
typ = self.__detect_arg_type(x) typ = self.__detect_arg_type(x)
args_types.append(typ) args_types.append(typ)
positions = self.token_details["positions"] positions = self.token_details['positions']
if positions is None: if positions == None:
positions = list(range(len(args))) positions = list(range(len(args)))
return (args, args_types, positions) return (args, args_types, positions)
def add_outputs(self, k, v): def add_outputs(self, k, v):
"""Adds given key/value pair to outputs array. """Adds given key/value pair to outputs array.
@ -140,9 +136,10 @@ class CICEth(Extension):
:param v: Output value :param v: Output value
:param v: bytes or str :param v: bytes or str
""" """
logg.debug(f"adding outputs {k} {v}") logg.debug('adding outputs {} {}'.format(k, v))
self.outputs.append((k, v)) self.outputs.append((k, v))
def get_outputs(self): def get_outputs(self):
"""Get wrapper for outputs captured from processing. """Get wrapper for outputs captured from processing.
@ -151,13 +148,14 @@ class CICEth(Extension):
""" """
return self.outputs return self.outputs
def process_token(self, writer=None): def process_token(self, writer=None):
"""Deploy token, and optionally mint token supply to token deployer account. """Deploy token, and optionally mint token supply to token deployer account.
:param writer: Writer interface receiving the output of the processor step :param writer: Writer interface receiving the output of the processor step
:type writer: cic.writers.OutputWriter :type writer: cic.output.OutputWriter
""" """
if writer is None: if writer == None:
writer = self.outputs_writer writer = self.outputs_writer
(args, args_types, positions) = self.__order_args() (args, args_types, positions) = self.__order_args()
@ -165,189 +163,143 @@ class CICEth(Extension):
enc = ABIContractEncoder() enc = ABIContractEncoder()
for i in positions: for i in positions:
getattr(enc, args_types[i])(args[i]) getattr(enc, args_types[i])(args[i])
code = enc.get() code = enc.get()
if self.token_code is not None: if self.token_code != None:
code = self.token_code + code code = self.token_code + code
logg.debug(f"resource {self.resources}") logg.debug('resource {}'.format(self.resources))
signer_address = add_0x( signer_address = add_0x(to_checksum_address(self.resources['token']['key_account']))
to_checksum_address(self.resources["token"]["key_account"])
)
nonce_oracle = None nonce_oracle = None
if self.rpc is not None: if self.rpc != None:
nonce_oracle = RPCNonceOracle(signer_address, conn=self.rpc) nonce_oracle = RPCNonceOracle(signer_address, conn=self.rpc)
c = TxFactory( c = TxFactory(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=self.fee_oracle)
self.chain_spec,
signer=self.signer,
nonce_oracle=nonce_oracle,
gas_oracle=self.fee_oracle,
)
tx = c.template(signer_address, None, use_nonce=True) tx = c.template(signer_address, None, use_nonce=True)
tx = c.set_code(tx, code) tx = c.set_code(tx, code)
o = c.finalize(tx, self.tx_format) o = c.finalize(tx, self.tx_format)
token_address_tx = None token_address_tx = None
r = None r = None
if self.rpc is not None: if self.rpc != None:
r = self.rpc.do(o[1]) r = self.rpc.do(o[1])
token_address_tx = r token_address_tx = r
o = self.rpc.wait(r) o = self.rpc.wait(r)
o = Tx.src_normalize(o) o = Tx.src_normalize(o)
self.token_address = o["contract_address"] self.token_address = o['contract_address']
elif self.signer is not None: elif self.signer != None:
r = o[1] r = o[1]
token_address_tx = r token_address_tx = r
if r is None: if r == None:
r = code r = code
writer.write("token", r.encode("utf-8")) writer.write('token', r.encode('utf-8'))
writer.write("token_address", self.token_address.encode("utf-8")) writer.write('token_address', self.token_address.encode('utf-8'))
self.add_outputs("token", r) self.add_outputs('token', r)
if int(self.token_details["supply"]) > 0: if int(self.token_details['supply']) > 0:
c = GiftableToken( c = GiftableToken(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=self.fee_oracle)
self.chain_spec, o = c.mint_to(self.token_address, self.resources['token']['key_account'], self.resources['token']['key_account'], self.token_details['supply'])
signer=self.signer,
nonce_oracle=nonce_oracle,
gas_oracle=self.fee_oracle,
)
o = c.mint_to(
self.token_address,
self.resources["token"]["key_account"],
self.resources["token"]["key_account"],
self.token_details["supply"],
)
r = None r = None
if self.rpc is not None: if self.rpc != None:
r = self.rpc.do(o[1]) r = self.rpc.do(o[1])
self.rpc.wait(r) self.rpc.wait(r)
writer.write("token_supply", r.encode("utf-8")) writer.write('token_supply', r.encode('utf-8'))
elif self.signer is not None: elif self.signer != None:
r = o[1] r = o[1]
writer.write( writer.write('token_supply', json.dumps(r).encode('utf-8'))
"token_supply", json.dumps(r, separators=(",", ":")).encode("utf-8")
)
else: else:
r = o r = o
writer.write("token_supply", r.encode("utf-8")) writer.write('token_supply', r.encode('utf-8'))
return token_address_tx return token_address_tx
def process_token_index(self, writer=None): def process_token_index(self, writer=None):
"""Register deployed token with token index. """Register deployed token with token index.
:param writer: Writer interface receiving the output of the processor step :param writer: Writer interface receiving the output of the processor step
:type writer: cic.writers.OutputWriter :type writer: cic.output.OutputWriter
""" """
if writer is None: if writer == None:
writer = self.outputs_writer writer = self.outputs_writer
signer_address = add_0x( signer_address = add_0x(to_checksum_address(self.resources['token_index']['key_account']))
to_checksum_address(self.resources["token_index"]["key_account"]) contract_address = add_0x(to_checksum_address(self.resources['token_index']['reference']))
)
contract_address = add_0x(
to_checksum_address(self.resources["token_index"]["reference"])
)
gas_oracle = OverrideGasOracle( gas_oracle = OverrideGasOracle(limit=TokenUniqueSymbolIndex.gas(), conn=self.rpc)
limit=TokenUniqueSymbolIndex.gas(), conn=self.rpc
)
nonce_oracle = None nonce_oracle = None
if self.rpc is not None: if self.rpc != None:
nonce_oracle = RPCNonceOracle(add_0x(signer_address), conn=self.rpc) nonce_oracle = RPCNonceOracle(add_0x(signer_address), conn=self.rpc)
c = TokenUniqueSymbolIndex( c = TokenUniqueSymbolIndex(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
self.chain_spec,
signer=self.signer, o = c.register(contract_address, signer_address, self.token_address, tx_format=self.tx_format)
nonce_oracle=nonce_oracle,
gas_oracle=gas_oracle,
)
o = c.register(
contract_address,
signer_address,
self.token_address,
tx_format=self.tx_format,
)
r = None r = None
if self.rpc is not None: if self.rpc != None:
r = self.rpc.do(o[1]) r = self.rpc.do(o[1])
self.rpc.wait(r) self.rpc.wait(r)
elif self.signer is not None: elif self.signer != None:
r = o[1] r = o[1]
else: else:
r = o r = o
writer.write("token_index", r.encode("utf-8")) writer.write('token_index', r.encode('utf-8'))
self.add_outputs("token_index", r) self.add_outputs('token_index', r)
return r return r
def process_address_declarator(self, writer=None): def process_address_declarator(self, writer=None):
"""Register token proofs with address declarator. """Register token proofs with address declarator.
:param writer: Writer interface receiving the output of the processor step :param writer: Writer interface receiving the output of the processor step
:type writer: cic.writers.OutputWriter :type writer: cic.output.OutputWriter
""" """
if writer is None: if writer == None:
writer = self.outputs_writer writer = self.outputs_writer
signer_address = add_0x( signer_address = add_0x(to_checksum_address(self.resources['address_declarator']['key_account']))
to_checksum_address(self.resources["address_declarator"]["key_account"]) contract_address = add_0x(to_checksum_address(self.resources['address_declarator']['reference']))
)
contract_address = add_0x(
to_checksum_address(self.resources["address_declarator"]["reference"])
)
gas_oracle = OverrideGasOracle(limit=AddressDeclarator.gas(), conn=self.rpc) gas_oracle = OverrideGasOracle(limit=AddressDeclarator.gas(), conn=self.rpc)
nonce_oracle = None nonce_oracle = None
if self.rpc is not None: if self.rpc != None:
nonce_oracle = RPCNonceOracle(signer_address, conn=self.rpc) nonce_oracle = RPCNonceOracle(signer_address, conn=self.rpc)
c = Declarator( c = Declarator(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
self.chain_spec,
signer=self.signer,
nonce_oracle=nonce_oracle,
gas_oracle=gas_oracle,
)
results = [] results = []
# (main_proof, all_proofs) = self.proof.get() #(main_proof, all_proofs) = self.proof.get()
# for proof in all_proofs: #for proof in all_proofs:
# logg.debug('proof {} '.format(proof)) #logg.debug('proof {} '.format(proof))
(k, v) = self.proof.root() (k, v) = self.proof.root()
fk = "address_declarator_" + k fk = 'address_declarator_' + k
o = c.add_declaration( o = c.add_declaration(contract_address, signer_address, self.token_address, k, tx_format=self.tx_format)
contract_address,
signer_address,
self.token_address,
k,
tx_format=self.tx_format,
)
r = None r = None
if self.rpc is not None: if self.rpc != None:
r = self.rpc.do(o[1]) r = self.rpc.do(o[1])
self.rpc.wait(r) self.rpc.wait(r)
elif self.signer is not None: elif self.signer != None:
r = o[1] r = o[1]
else: else:
r = o r = o
self.add_outputs(fk, r) self.add_outputs(fk, r)
results.append(r) results.append(r)
v = r.encode("utf-8") v = r.encode('utf-8')
if writer is not None: if writer != None:
writer.write(fk, v) writer.write(fk, v)
return results return results
def prepare_extension(self): def prepare_extension(self):
"""Sets token address for extension if defined in settings.""" """Sets token address for extension if defined in settings.
"""
super(CICEth, self).prepare_extension() super(CICEth, self).prepare_extension()
if self.token_address is not None: if self.token_address != None:
self.token_address = add_0x(to_checksum_address(self.token_address)) self.token_address = add_0x(to_checksum_address(self.token_address))
@ -356,11 +308,4 @@ def new(chain_spec, resources, proof, signer_hint=None, rpc=None, outputs_writer
See CICEth constructor for details. See CICEth constructor for details.
""" """
return CICEth( return CICEth(chain_spec, resources, proof, signer=signer_hint, rpc=rpc, outputs_writer=outputs_writer)
chain_spec,
resources,
proof,
signer=signer_hint,
rpc=rpc,
outputs_writer=outputs_writer,
)

View File

@ -1,19 +1,17 @@
# standard imports # standard imports
from getpass import getpass
import logging
import os
import stat import stat
import os
import logging
# external imports # external imports
from funga.eth.keystore.dict import DictKeystore from funga.eth.keystore.dict import DictKeystore
from funga.eth.signer import EIP155Signer from funga.eth.signer import EIP155Signer
from chainlib.cli import Wallet
from chainlib.eth.cli import Rpc from chainlib.eth.cli import Rpc
from chainlib.cli import Wallet
# local imports # local imports
from cic.keystore import KeystoreDirectory from cic.keystore import KeystoreDirectory
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
@ -22,14 +20,12 @@ class EthKeystoreDirectory(DictKeystore, KeystoreDirectory):
TODO: Move to funga TODO: Move to funga
""" """
pass
def get_passphrase():
return getpass('Enter passphrase: ')
def parse_adapter(config, signer_hint): def parse_adapter(config, signer_hint):
"""Determine and instantiate signer and rpc from configuration. """Determine and instantiate signer and rpc from configuration.
If either could not be determined, None is returned. If either could not be determined, None is returned.
:param config: Configuration object implementing the get() method :param config: Configuration object implementing the get() method
@ -40,14 +36,14 @@ def parse_adapter(config, signer_hint):
:return: RPC interface, signer interface :return: RPC interface, signer interface
""" """
keystore = None keystore = None
if signer_hint is None: if signer_hint == None:
logg.info("signer hint missing") logg.info('signer hint missing')
return None return None
st = os.stat(signer_hint) st = os.stat(signer_hint)
if stat.S_ISDIR(st.st_mode): if stat.S_ISDIR(st.st_mode):
logg.debug("signer hint is directory") logg.debug('signer hint is directory')
keystore = EthKeystoreDirectory() keystore = EthKeystoreDirectory()
keystore.process_dir(signer_hint, password_retriever=get_passphrase) keystore.process_dir(signer_hint)
w = Wallet(EIP155Signer, keystore=keystore) w = Wallet(EIP155Signer, keystore=keystore)
signer = EIP155Signer(keystore) signer = EIP155Signer(keystore)
@ -55,17 +51,3 @@ def parse_adapter(config, signer_hint):
rpc.connect_by_config(config) rpc.connect_by_config(config)
return (rpc.conn, signer) return (rpc.conn, signer)
# TODO Find a better place for this
def list_keys(config, signer_hint):
keystore = None
if signer_hint is None:
logg.info("signer hint missing")
return None
st = os.stat(signer_hint)
if stat.S_ISDIR(st.st_mode):
logg.debug("signer hint is directory")
keystore = EthKeystoreDirectory()
keystore.process_dir(signer_hint, default_password=config.get('WALLET_PASSPHRASE', ''), password_retriever=get_passphrase)
return keystore.list()

View File

@ -9,14 +9,14 @@ def extension_start(network, *args, **kwargs):
:param network: Network object to read and write settings from :param network: Network object to read and write settings from
:type network: cic.network.Network :type network: cic.network.Network
""" """
CICRegistry.address = kwargs.get("registry_address") CICRegistry.address = kwargs["registry_address"]
key_account_address = kwargs.get("key_account_address")
RPCConnection.register_location(
kwargs.get("rpc_provider"), kwargs.get("chain_spec")
)
conn = RPCConnection.connect(kwargs.get("chain_spec"))
registry = CICRegistry(kwargs.get("chain_spec"), conn) key_account_address = kwargs["key_account_address"] or ""
RPCConnection.register_location(kwargs["rpc_provider"], kwargs["chain_spec"])
conn = RPCConnection.connect(kwargs["chain_spec"])
registry = CICRegistry(kwargs["chain_spec"], conn)
address_declarator = registry.by_name("AddressDeclarator") address_declarator = registry.by_name("AddressDeclarator")
network.resource_set( network.resource_set(

View File

@ -1,12 +1,13 @@
# standard imports # standard imports
import logging import logging
from typing import TYPE_CHECKING
# external imports # external imports
from hexathon import valid as valid_hex from hexathon import valid as valid_hex
# local imports # local imports
from cic.writers import StdoutWriter from cic.output import StdoutWriter
from cic.contract.components.token import Token from cic.token import Token
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
@ -25,7 +26,7 @@ class Extension:
:param rpc: RPC adapter capable of submitting and querying the chain network node :param rpc: RPC adapter capable of submitting and querying the chain network node
:type rpc: chainlib.connection.RPCConnection :type rpc: chainlib.connection.RPCConnection
:param writer: Writer interface receiving the output of the processor :param writer: Writer interface receiving the output of the processor
:type writer: cic.writers.OutputWriter :type writer: cic.output.OutputWriter
""" """
def __init__( def __init__(
@ -74,8 +75,8 @@ class Extension:
precision, precision,
code, code,
supply, supply,
extra=None, extra=[],
extra_types=None, extra_types=[],
positions=None, positions=None,
): ):
"""Initialize extension token data. """Initialize extension token data.
@ -105,8 +106,8 @@ class Extension:
"precision": precision, "precision": precision,
"code": code, "code": code,
"supply": supply, "supply": supply,
"extra": extra or [], "extra": extra,
"extra_types": extra_types or [], "extra_types": extra_types,
"positions": positions, "positions": positions,
} }
logg.debug(f"token details: {self.token_details}") logg.debug(f"token details: {self.token_details}")
@ -114,7 +115,7 @@ class Extension:
def prepare_extension(self): def prepare_extension(self):
"""Prepare extension for publishing (noop)""" """Prepare extension for publishing (noop)"""
pass
def parse_code_as_file(self, v): def parse_code_as_file(self, v):
"""Helper method to load application bytecode from file into extensions token data state. """Helper method to load application bytecode from file into extensions token data state.
@ -125,14 +126,16 @@ class Extension:
:type v: str :type v: str
""" """
try: try:
f = open(v, "r", encoding="utf-8") f = open(v, "r")
r = f.read() r = f.read()
f.close() f.close()
self.parse_code_as_hex(r) self.parse_code_as_hex(r)
except FileNotFoundError as e: except FileNotFoundError as e:
logg.debug(f"could not parse code as file: {e}") logg.debug("could not parse code as file: {}".format(e))
pass
except IsADirectoryError as e: except IsADirectoryError as e:
logg.debug(f"could not parse code as file: {e}") logg.debug("could not parse code as file: {}".format(e))
pass
def parse_code_as_hex(self, v): def parse_code_as_hex(self, v):
"""Helper method to load application bytecode from hex data into extension token data state. """Helper method to load application bytecode from hex data into extension token data state.
@ -145,7 +148,8 @@ class Extension:
try: try:
self.token_code = valid_hex(v) self.token_code = valid_hex(v)
except ValueError as e: except ValueError as e:
logg.debug(f"could not parse code as hex: {e}") logg.debug("could not parse code as hex: {}".format(e))
pass
def load_code(self, hint=None): def load_code(self, hint=None):
"""Attempt to load token application bytecode using token settings. """Attempt to load token application bytecode using token settings.
@ -164,10 +168,10 @@ class Extension:
self.parse_code_as_file, self.parse_code_as_file,
]: ]:
m(code) m(code)
if self.token_code is not None: if self.token_code != None:
break break
if self.token_code is None: if self.token_code == None:
raise RuntimeError("could not successfully parse token code") raise RuntimeError("could not successfully parse token code")
return self.token_code return self.token_code
@ -175,23 +179,22 @@ class Extension:
def process(self, writer=None): def process(self, writer=None):
"""Adapter used by Processor to process the extensions implementing the Extension base class. """Adapter used by Processor to process the extensions implementing the Extension base class.
Requires either token address or a valid token code reference to have been included in settings. Requires either token address or a valid token code reference to have been included in settings. If token address is not set, the token application code will be deployed.
If token address is not set, the token application code will be deployed.
:param writer: Writer to use for publishing. :param writer: Writer to use for publishing.
:type writer: cic.writers.OutputWriter :type writer: cic.output.OutputWriter
:rtype: tuple :rtype: tuple
:return: Token address, token symbol :return: Token address, token symbol
""" """
if writer is None: if writer == None:
writer = self.outputs_writer writer = self.outputs_writer
tasks = [] tasks = []
self.token_address = self.resources["token"]["reference"] self.token_address = self.resources["token"]["reference"]
# TODO: get token details when token address is not none # TODO: get token details when token address is not none
if self.token_address is None: if self.token_address == None:
if self.token_details["code"] is None: if self.token_details["code"] == None:
raise RuntimeError("neither token address nor token code has been set") raise RuntimeError("neither token address nor token code has been set")
self.load_code() self.load_code()
tasks.append("token") tasks.append("token")
@ -199,13 +202,13 @@ class Extension:
for k in self.resources.keys(): for k in self.resources.keys():
if k == "token": if k == "token":
continue continue
if self.resources[k]["reference"] is not None: if self.resources[k]["reference"] != None:
tasks.append(k) tasks.append(k)
self.prepare_extension() self.prepare_extension()
for task in tasks: for task in tasks:
logg.debug(f"extension adapter process {task}") logg.debug("extension adapter process {}".format(task))
_r = getattr(self, "process_" + task)(writer=writer) r = getattr(self, "process_" + task)(writer=writer)
return (self.token_address, self.token_details.get("symbol")) return (self.token_address, self.token_details.get("symbol"))

10
cic/hash.py Normal file
View File

@ -0,0 +1,10 @@
# standard imports
import hashlib


class Hasher:
def __basehasher(self, v):
h = hashlib.sha256()
h.update(v)
return h.digest()
def hash(self, v):
return self.__basehasher(v)

111
cic/http.py Normal file
View File

@ -0,0 +1,111 @@
# standard imports
import hashlib
import logging
import os
import ssl
import urllib.parse
from http.client import HTTPResponse
from socket import getservbyname
from urllib.request import HTTPSHandler
# external imports
from usumbufu.client.base import BaseTokenStore, ClientSession
from usumbufu.client.bearer import BearerClientSession
from usumbufu.client.hoba import HobaClientSession
logg = logging.getLogger(__name__)
class PGPClientSession(HobaClientSession):
alg = "969"
def __init__(self, auth):
self.auth = auth
self.origin = None
self.fingerprint = self.auth.fingerprint()
def sign_auth_challenge(self, plaintext, hoba, encoding):
passphrase = self.auth.get_passphrase()
r = self.auth.sign(plaintext, encoding, passphrase=passphrase, detach=True)
hoba.signature = r
return str(hoba)
def __str__(self):
return "clicada hoba/pgp auth"
def __repr__(self):
return "clicada hoba/pgp auth"
class HTTPSession:
token_dir = f"/run/user/{os.getuid()}/clicada/usumbufu/.token"
def __init__(self, url, auth=None, origin=None):
self.base_url = url
url_parts = urllib.parse.urlsplit(self.base_url)
url_parts_origin_host = url_parts[1].split(":")
host = url_parts_origin_host[0]
try:
host = host + ":" + url_parts_origin_host[1]
except IndexError:
host = host + ":" + str(getservbyname(url_parts[0]))
logg.info(
f"changed origin with missing port number from {url_parts[1]} to {host}"
)
url_parts_origin = (
url_parts[0],
host,
"",
"",
"",
)
self.origin = origin
if self.origin is None:
self.origin = urllib.parse.urlunsplit(url_parts_origin)
else:
logg.debug(f"overriding http origin for {url} with {self.origin}")
h = hashlib.sha256()
h.update(self.base_url.encode("utf-8"))
z = h.digest()
token_store_dir = os.path.join(self.token_dir, z.hex())
os.makedirs(token_store_dir, exist_ok=True)
self.token_store = BaseTokenStore(path=token_store_dir)
logg.debug(
f"token store: \n{self.token_store}\n origin: {self.origin}\n token_store_dir: {token_store_dir}\n"
)
self.session = ClientSession(self.origin, token_store=self.token_store)
bearer_handler = BearerClientSession(self.origin, token_store=self.token_store)
self.session.add_subhandler(bearer_handler)
if auth is not None:
auth.origin = self.origin
self.session.add_subhandler(auth)
ctx = ssl.create_default_context()
ctx.load_verify_locations(
capath="/home/will/grassroots/cic-staff-installer/keys/ge.ca"
)
https_handler = HTTPSHandler(context=ctx)
self.session.add_parent(parent=https_handler)
self.opener = urllib.request.build_opener(self.session)
def open(self, url, method=None, data: bytes = None, headers=None):
logg.debug(f"headers: {headers}")
logg.debug(f"token store: \n{self.token_store}\n origin: {self.origin}")
req = urllib.request.Request(url=url, data=data, headers=headers, method=method)
logg.debug(f"open {url} with opener {self}")
logg.debug(req.get_full_url())
logg.debug(f"handlers {self.opener.handlers}")
response: HTTPResponse = self.opener.open(req)
status = response.getcode()
logg.debug(f"{url} returned {status}")
return response.read().decode("utf-8")
def __str__(self):
return str(self.session)
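A usage sketch for the session wrapper above; the URLs are placeholders, and note that the hard-coded capath in the constructor ties the class to a specific environment:

    from cic.http import HTTPSession

    session = HTTPSession("https://meta.sarafu.network")
    body = session.open("https://meta.sarafu.network/", method="GET", headers={})
    print(body)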

View File

@ -23,9 +23,9 @@ class KeystoreDirectory(Keystore):
except IsADirectoryError: except IsADirectoryError:
pass pass
except KeyfileError as e: except KeyfileError as e:
logg.warning(f'file {fp} could not be parsed as keyfile: {e}') logg.warning('file {} could not be parsed as keyfile: {}'.format(fp, e))
except DecryptError as e: except DecryptError as e:
if password_retriever is None: if password_retriever == None:
raise e raise e
password = password_retriever() password = password_retriever()
self.import_keystore_file(fp, password=password) self.import_keystore_file(fp, password=password)

View File

@ -1,19 +1,28 @@
from __future__ import annotations from __future__ import annotations
# standard imports # standard imports
import os import base64
import json import json
import logging import logging
import os
# types
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from cic.cmd.arg import CmdCtrl
# external imports # external imports
from cic_types import MetadataPointer from cic_types import MetadataPointer
from cic_types.processor import generate_metadata_pointer from cic_types.processor import generate_metadata_pointer
from hexathon import strip_0x from hexathon import strip_0x
# local imports from cic.MetaRequestHandler import MetadataRequestsHandler
from cic.contract.base import Data, data_dir from cic.output import OutputWriter
from cic.utils import object_to_str from cic.utils import object_to_str
# local imports
from .base import Data, data_dir
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
@ -25,11 +34,11 @@ class Meta(Data):
:param path: Path to settings directory :param path: Path to settings directory
:type path: str :type path: str
:param writer: Writer interface receiving the output of the processor :param writer: Writer interface receiving the output of the processor
:type writer: cic.writers.OutputWriter :type writer: cic.output.OutputWriter
""" """
def __init__( def __init__(
self, path=".", writer=None, name="", location="", country_code="KE", contact={}, interactive=False self, path=".", writer=None, name="", location="", country_code="", contact={}
): ):
super(Meta, self).__init__() super(Meta, self).__init__()
self.name = name self.name = name
@ -40,25 +49,6 @@ class Meta(Data):
self.writer = writer self.writer = writer
self.meta_path = os.path.join(self.path, "meta.json") self.meta_path = os.path.join(self.path, "meta.json")
if interactive:
self.name = input(f"Enter Metadata Name ({self.name}): ") or self.name
self.country_code = input(f"Enter Metadata Country Code ({self.country_code}): ") or self.country_code
self.location = input(f"Enter Metadata Location ({self.location}): ") or self.location
adding_contact_info = True
contact = {}
while adding_contact_info:
value = input("Enter Metadata contact info (e.g 'phone: +254723522718'): ") or None
if value:
data = value.split(":")
if len(data) != 2:
print("Invalid contact info, you must enter in the format 'key: value'")
continue
contact[data[0].strip()] = data[1].strip()
else:
adding_contact_info = False
self.contact = contact
def load(self): def load(self):
"""Load metadata from settings.""" """Load metadata from settings."""
super(Meta, self).load() super(Meta, self).load()
@ -118,7 +108,7 @@ class Meta(Data):
if writer is None: if writer is None:
writer = self.writer writer = self.writer
v = json.dumps(self.asdict(), separators=(",", ":")) v = json.dumps(self.asdict())
token_address_bytes = bytes.fromhex(strip_0x(token_address)) token_address_bytes = bytes.fromhex(strip_0x(token_address))
k = generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_META) k = generate_metadata_pointer(token_address_bytes, MetadataPointer.TOKEN_META)
@ -135,3 +125,25 @@ class Meta(Data):
def __str__(self): def __str__(self):
return object_to_str(self, ["name", "contact", "country_code", "location"]) return object_to_str(self, ["name", "contact", "country_code", "location"])
class MetadataWriter(OutputWriter):
"""Custom writer for publishing data under immutable content-addressed pointers in the cic-meta storage backend.
Data that is not utf-8 will be converted to base64 before publishing.
Implements cic.output.OutputWriter
"""
def write(self, k, v):
rq = MetadataRequestsHandler(MetadataPointer.NONE, bytes.fromhex(k))
try:
v = v.decode("utf-8")
v = json.loads(v)
logg.debug(f"metadatawriter bindecode {k} {v}")
except UnicodeDecodeError:
v = base64.b64encode(v).decode("utf-8")
v = json.loads(json.dumps(v))
logg.debug(f"metadatawriter b64encode {k} {v}")
r = rq.create(v)
logg.info(f"metadata submitted at {k}")
return r
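The content key used by the publishing path above is derived from the token address; a minimal sketch with an illustrative address:

    from cic_types import MetadataPointer
    from cic_types.processor import generate_metadata_pointer
    from hexathon import strip_0x

    token_address = "0x0000000000000000000000000000000000000000"  # illustrative
    k = generate_metadata_pointer(bytes.fromhex(strip_0x(token_address)), MetadataPointer.TOKEN_META)
    # k is the pointer the serialized token metadata is written under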

View File

@ -1,12 +1,16 @@
# standard imports # standard imports
import os
import json import json
import logging import logging
import os
# external imports # external imports
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
# local imports # local imports
from cic.contract.base import Data, data_dir from .base import (
Data,
data_dir,
)
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
@ -34,8 +38,9 @@ class Network(Data):
""" """
super(Network, self).load() super(Network, self).load()
with open(self.network_path, 'r', encoding='utf-8') as f: f = open(self.network_path, 'r')
o = json.load(f) o = json.load(f)
f.close()
self.resources = o['resources'] self.resources = o['resources']
@ -51,8 +56,9 @@ class Network(Data):
network_template_file_path = os.path.join(data_dir, f'network_template_v{self.version()}.json') network_template_file_path = os.path.join(data_dir, f'network_template_v{self.version()}.json')
with open(network_template_file_path, encoding='utf-8') as f: f = open(network_template_file_path)
o_part = json.load(f) o_part = json.load(f)
f.close()
self.resources = {} self.resources = {}
for v in self.targets: for v in self.targets:
@ -64,10 +70,11 @@ class Network(Data):
def save(self): def save(self):
"""Save network settings to file. """Save network settings to file.
""" """
with open(self.network_path, 'w', encoding='utf-8') as f: f = open(self.network_path, 'w')
json.dump({ json.dump({
'resources': self.resources, 'resources': self.resources,
}, f, sort_keys=True, indent="\t") }, f, sort_keys=True, indent="\t")
f.close()
def resource(self, k): def resource(self, k):
@ -79,8 +86,8 @@ class Network(Data):
:return: Extension settings :return: Extension settings
""" """
v = self.resources.get(k) v = self.resources.get(k)
if v is None: if v == None:
raise AttributeError(f'No defined reference for {k}') raise AttributeError('no defined reference for {}'.format(k))
return v return v
@ -125,19 +132,17 @@ class Network(Data):
""" """
chain_spec_dict = chain_spec.asdict() chain_spec_dict = chain_spec.asdict()
for k in chain_spec_dict.keys(): for k in chain_spec_dict.keys():
logg.debug(f'resources: {self.resources}') logg.debug('resources {}'.format(self.resources))
self.resources[resource_key]['chain_spec'][k] = chain_spec_dict[k] self.resources[resource_key]['chain_spec'][k] = chain_spec_dict[k]
def __str__(self): def __str__(self):
s = '' s = ''
for resource in self.resources.keys(): for resource in self.resources.keys():
chainspec = ChainSpec.from_dict(self.resources[resource]['chain_spec'])
s += f'{resource}.chain_spec: {str(chainspec)}\n'
for content_key in self.resources[resource]['contents'].keys(): for content_key in self.resources[resource]['contents'].keys():
content_value = self.resources[resource]['contents'][content_key] content_value = self.resources[resource]['contents'][content_key]
if content_value is None: if content_value == None:
content_value = '' content_value = ''
s += f'{resource}.contents.{content_key} = {json.dumps(content_value, indent=4, sort_keys=True)}\n' s += f'{resource}.{content_key} = {content_value}\n'
return s return s
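For reference, the resources structure that Network.load()/save() above read and write nests a chain_spec block and a contents block per target. A hand-written sketch of that shape, mirroring the values asserted in the CSV tests further down this diff (addresses and key accounts are illustrative):

import json

network = {
    "resources": {
        "eth": {
            "chain_spec": {
                "arch": "evm",
                "fork": "kitabu",
                "common_name": "sarafu",
                "network_id": 6060,
                "custom": [],
                "extra": {},
            },
            "contents": {
                "token": {"key_account": "cc4f82f5dacde395e1e0cfc4d62827c8b8b5688c", "reference": None},
                "token_index": {"key_account": "cc4f82f5dacde395e1e0cfc4d62827c8b8b5688c", "reference": "5A1EB529438D8b3cA943A45a48744f4c73d1f098"},
                "address_declarator": {"key_account": "cc4f82f5dacde395e1e0cfc4d62827c8b8b5688c", "reference": "f055e83f713DbFF947e923749Af9802eaffFB5f9"},
            },
        },
    },
}

# same layout as Network.save(): {'resources': ...}, sorted keys, tab indent
with open("network.json", "w", encoding="utf-8") as f:
    json.dump(network, f, sort_keys=True, indent="\t")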

33
cic/notify.py Normal file

@ -0,0 +1,33 @@
# standard imports
import os
import sys
import shutil
class NotifyWriter:
def __init__(self, writer=sys.stdout):
(c, r) = shutil.get_terminal_size()
self.cols = c
self.fmt = "\r{:" + "<{}".format(c) + "}"
self.w = writer
self.notify_max = self.cols - 4
def notify(self, v):
if len(v) > self.notify_max:
v = v[:self.notify_max]
self.write('\x1b[0;36m... ' + v + '\x1b[0;39m')
def ouch(self, v):
if len(v) > self.notify_max:
v = v[:self.notify_max]
self.write('\x1b[0;91m!!! ' + v + '\x1b[0;39m')
def write(self, v):
s = str(v)
if len(s) > self.cols:
s = s[:self.cols]
self.w.write(self.fmt.format(s))
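The new cic/notify.py above gives the interactive command a single-line status display padded to the terminal width. A minimal usage sketch, assuming the cic.notify module added on one side of this changeset (the messages are made up):

from cic.notify import NotifyWriter

notifier = NotifyWriter()
notifier.notify("deploying token contract")  # cyan "... deploying token contract", carriage-return line, no newline
notifier.ouch("rpc node unreachable")        # red "!!! rpc node unreachable"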

96
cic/output.py Normal file

@ -0,0 +1,96 @@
# standard imports
import os
import sys
import logging
import urllib.request
logg = logging.getLogger(__name__)
class OutputWriter:
def __init__(self, *args, **kwargs):
pass
def write(self, k, v, **kwargs):
raise NotImplementedError()
class StdoutWriter(OutputWriter):
def write(self, k, v):
sys.stdout.write('{}\t{}\n'.format(k, v))
class KVWriter(OutputWriter):
def __init__(self, path=None, *args, **kwargs):
try:
os.stat(path)
except FileNotFoundError:
os.makedirs(path)
self.path = path
def write(self, k, v):
fp = os.path.join(self.path, str(k))
logg.debug('path write {} {}'.format(fp, str(v)))
f = open(fp, 'wb')
f.write(v)
f.close()
class HTTPWriter(OutputWriter):
def __init__(self, path=None, *args, **kwargs):
super(HTTPWriter, self).__init__(*args, **kwargs)
self.path = path
def write(self, k, v):
path = self.path
if k != None:
path = os.path.join(path, k)
logg.debug(f'http writer post {path} \n key: {k}, value: {v}')
rq = urllib.request.Request(path, method='POST', data=v)
r = urllib.request.urlopen(rq)
logg.info('http writer submitted at {}'.format(r.read()))
class KeyedWriter(OutputWriter):
def __init__(self, writer_keyed, writer_immutable):
self.writer_keyed = writer_keyed
self.writer_immutable = writer_immutable
def write(self, k, v):
logg.debug('writing keywriter {} {}'.format(k, v))
if isinstance(v, str):
v = v.encode('utf-8')
if self.writer_keyed != None:
self.writer_keyed.write(k, v)
if self.writer_immutable != None:
self.writer_immutable.write(None, v)
class KeyedWriterFactory:
def __init__(self, key_writer_constructor, immutable_writer_constructor, *args, **kwargs):
self.key_writer_constructor = key_writer_constructor
self.immutable_writer_constructor = immutable_writer_constructor
self.x = {}
for k in kwargs.keys():
logg.debug('adding key {} t keyed writer factory'.format(k))
self.x[k] = kwargs[k]
def new(self, path=None, *args, **kwargs):
writer_keyed = None
writer_immutable = None
if self.key_writer_constructor != None:
writer_keyed = self.key_writer_constructor(path, **self.x)
if self.immutable_writer_constructor != None:
writer_immutable = self.immutable_writer_constructor(path, **self.x)
return KeyedWriter(writer_keyed, writer_immutable)
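A short sketch of how the writer classes in cic/output.py above compose: KVWriter persists keyed values to a directory, while KeyedWriter fans a value out to both a keyed and an immutable writer. Assumes only the cic.output module shown above; the key and value are illustrative:

import tempfile

from cic.output import KVWriter, StdoutWriter, KeyedWriter

out_dir = tempfile.mkdtemp()
keyed = KeyedWriter(KVWriter(path=out_dir), StdoutWriter())

# stores the value under out_dir/token_meta and echoes the immutable copy to stdout
keyed.write("token_meta", '{"symbol": "FOO"}')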

View File

@ -4,7 +4,7 @@ import logging
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
class ContractProcessor: class Processor:
"""Drives the serialization and publishing of contracts, proofs and metadata for the token. """Drives the serialization and publishing of contracts, proofs and metadata for the token.
:param proof: Proof object to publish :param proof: Proof object to publish
@ -14,7 +14,7 @@ class ContractProcessor:
:param metadata: Metadata object to publish :param metadata: Metadata object to publish
:type metadata: cic.meta.Meta :type metadata: cic.meta.Meta
:param writer: Writer interface receiving the output of the processor :param writer: Writer interface receiving the output of the processor
:type writer: cic.writers.OutputWriter :type writer: cic.output.OutputWriter
:param extensions: Extension contexts to publish to :param extensions: Extension contexts to publish to
:type extensions: list of cic.extension.Extension :type extensions: list of cic.extension.Extension
""" """
@ -40,7 +40,7 @@ class ContractProcessor:
def writer(self): def writer(self):
"""Return the writer instance that the process is using. """Return the writer instance that the process is using.
:rtype: cic.writers.OutputWriter :rtype: cic.output.OutputWriter
:return: Writer :return: Writer
""" """
return self.__outputs_writer return self.__outputs_writer
@ -67,7 +67,7 @@ class ContractProcessor:
All output written to the publish writer will also be cached so that it subsequently be recalled using the get_outputs method. All output written to the publish writer will also be cached so that it subsequently be recalled using the get_outputs method.
:param writer: Writer to use for publishing. :param writer: Writer to use for publishing.
:type writer: cic.writers.OutputWriter :type writer: cic.output.OutputWriter
""" """
tasks = [ tasks = [
@ -77,14 +77,14 @@ class ContractProcessor:
] ]
for ext in self.extensions: for ext in self.extensions:
(token_address, token_symbol) = ext.process() # (token_address, token_symbol) = ext.process()
token_address="1a4b2d1B564456f07d5920FeEcdF86077F7bba1E"
token_symbol="WILLY"
for task in tasks: for task in tasks:
a = self.cores.get(task) a = self.cores.get(task)
if a is None: if a == None:
logg.debug(f'skipping missing task receiver "{task}"') logg.debug('skipping missing task receiver "{}"'.format(task))
continue continue
logg.debug(f'Processing "{ext}:{task}"')
v = a.process( v = a.process(
token_address=token_address, token_address=token_address,
token_symbol=token_symbol, token_symbol=token_symbol,
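The processor's contract with its extensions is easiest to see in the processor tests near the end of this diff: an extension only has to expose process() returning the token address and symbol. A condensed sketch of that pattern, using the cic.processor / cic.output / cic.meta module layout that appears on one side of this changeset (the other side renames these to cic.contract.processor, cic.writers and cic.contract.components); the session directory, output path, address and the stand-in extension are illustrative:

from cic.meta import Meta
from cic.output import KVWriter
from cic.processor import Processor

class StubExt:
    # stand-in extension: supplies the deployed token address and symbol
    def __init__(self, address):
        self.address = address
    def process(self):
        return (self.address, "FOO")

token_address = "1a4b2d1B564456f07d5920FeEcdF86077F7bba1E"  # illustrative

meta = Meta("./session")        # a directory initialised with `cic init`, holding meta.json
meta.load()

p = Processor(
    metadata=meta,
    outputs_writer=KVWriter("./outputs"),
    extensions=[StubExt(token_address)],
)
p.token_address = token_address
p.process()                     # publishes the metadata keyed by the token address
print(p.get_outputs())          # cached copies of everything handed to the writer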

View File

@ -4,16 +4,16 @@ import logging
import os import os
import tempfile import tempfile
# external imports
from hexathon import strip_0x
from cic_types import MetadataPointer from cic_types import MetadataPointer
from cic_types.processor import generate_metadata_pointer from cic_types.processor import generate_metadata_pointer
# local imports # external imports
from cic.contract.base import Data, data_dir from hexathon import strip_0x
from cic.utils import object_to_str from cic.utils import object_to_str
# local imports
from .base import *
logg = logging.getLogger(__name__) logg = logging.getLogger(__name__)
@ -23,27 +23,24 @@ class Proof(Data):
It processes inputs from the proof.json file in the session directory. It processes inputs from the proof.json file in the session directory.
Optionally, attachment objects can be added to the proof. If added, the resulting proof Optionally, attachment objects can be added to the proof. If added, the resulting proof digest will consists of the attachment digests added to the root digest. These are then are deterministically ordered, regardless of which order attachments were given to the constructor.
digest will consists of the attachment digests added to the root digest. These are then
are deterministically ordered, regardless of which order attachments were given to the constructor.
:param path: Path to settings directory :param path: Path to settings directory
:type path: str :type path: str
:param attachments: List of attachment objects to include in the proof :param attachments: List of attachment objects to include in the proof
:type attachments: cic.attachment.Attachment :type attachments: cic.attachment.Attachment
:param writer: Writer interface receiving the output of the processor :param writer: Writer interface receiving the output of the processor
:type writer: cic.writers.OutputWriter :type writer: cic.output.OutputWriter
""" """
def __init__( def __init__(
self, self,
path=".", path=".",
description="", description=None,
namespace="ge", namespace="ge",
issuer="", issuer=None,
attachments=None, attachments=None,
writer=None, writer=None,
interactive=False,
): ):
super(Proof, self).__init__() super(Proof, self).__init__()
self.proofs = [] self.proofs = []
@ -57,21 +54,11 @@ class Proof(Data):
self.proof_path = os.path.join(self.path, "proof.json") self.proof_path = os.path.join(self.path, "proof.json")
self.temp_proof_path = tempfile.mkstemp()[1] self.temp_proof_path = tempfile.mkstemp()[1]
if interactive:
self.description = (
input(f"Enter Proof Description ({self.description}): ")
or self.description
)
self.namespace = (
input(f"Enter Proof Namespace ({self.namespace}): ") or self.namespace
)
self.issuer = input(f"Enter Proof Issuer ({self.issuer}): ") or self.issuer
def load(self): def load(self):
"""Load proof data from settings.""" """Load proof data from settings."""
super(Proof, self).load() super(Proof, self).load()
f = open(self.proof_path, "r", encoding="utf-8") f = open(self.proof_path, "r")
o = json.load(f) o = json.load(f)
f.close() f.close()
@ -81,7 +68,7 @@ class Proof(Data):
self.issuer = o["issuer"] self.issuer = o["issuer"]
self.proofs = o["proofs"] self.proofs = o["proofs"]
if self.extra_attachments is not None: if self.extra_attachments != None:
a = self.extra_attachments.asdict() a = self.extra_attachments.asdict()
for k in a.keys(): for k in a.keys():
self.attachments[k] = a[k] self.attachments[k] = a[k]
@ -96,18 +83,18 @@ class Proof(Data):
super(Proof, self).start() super(Proof, self).start()
proof_template_file_path = os.path.join( proof_template_file_path = os.path.join(
data_dir, f"proof_template_v{self.version()}.json" data_dir, "proof_template_v{}.json".format(self.version())
) )
with open(proof_template_file_path, "r", encoding="utf-8") as f: f = open(proof_template_file_path)
o = json.load(f) o = json.load(f)
f.close()
o["issuer"] = self.issuer o["issuer"] = self.issuer
o["description"] = self.description o["description"] = self.description
o["namespace"] = self.namespace o["namespace"] = self.namespace
f = open(self.proof_path, "w")
with open(self.proof_path, "w", encoding="utf-8") as f: json.dump(o, f, sort_keys=True, indent="\t")
json.dump(o, f, sort_keys=True, indent="\t") f.close()
def asdict(self): def asdict(self):
"""Output proof state to dict.""" """Output proof state to dict."""
@ -135,10 +122,11 @@ class Proof(Data):
"""Calculate the root digest from the serialized proof object.""" """Calculate the root digest from the serialized proof object."""
v = self.asdict() v = self.asdict()
# b = cbor2.dumps(v) # b = cbor2.dumps(v)
b = json.dumps(v, separators=(",", ":")) b = json.dumps(v)
with open(self.temp_proof_path, "w", encoding="utf-8") as f: f = open(self.temp_proof_path, "w")
f.write(b) f.write(b)
f.close()
b = b.encode("utf-8") b = b.encode("utf-8")
k = self.hash(b) k = self.hash(b)
@ -150,7 +138,7 @@ class Proof(Data):
See cic.processor.Processor.process See cic.processor.Processor.process
""" """
if writer is None: if writer == None:
writer = self.writer writer = self.writer
(k, v) = self.root() (k, v) = self.root()
@ -185,8 +173,9 @@ class Proof(Data):
# writer.write(r_hex, hshs_cat) # writer.write(r_hex, hshs_cat)
o = self.asdict() o = self.asdict()
with open(self.proof_path, "w", encoding="utf-8") as f: f = open(self.proof_path, "w")
json.dump(o, f, sort_keys=True, indent="\t") json.dump(o, f, sort_keys=True, indent="\t")
f.close()
return root_key return root_key
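For reference, the proof.json that Proof.load()/save() above exchange carries the issuer, description, namespace and proof list. The bundled proof_template may add further fields, so treat this hand-written file as a partial sketch; the values mirror the CSV test fixtures later in this diff:

import json

proof = {
    "namespace": "ge",
    "issuer": "Bondeni SHG",
    "description": "1 BONDE = 1 itumbe",
    "proofs": [],
}

with open("proof.json", "w", encoding="utf-8") as f:
    json.dump(proof, f, sort_keys=True, indent="\t")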

View File

@ -1,97 +1,66 @@
# standard imports # standard imports
import importlib
import logging
import os import os
import logging
import argparse
import sys import sys
import importlib
# external imports # external imports
import chainlib.cli import chainlib.cli
import cic.cmd.export as cmd_export
import cic.cmd.ext as cmd_ext
# local imports
import cic.cmd.init as cmd_init import cic.cmd.init as cmd_init
import cic.cmd.show as cmd_show import cic.cmd.show as cmd_show
import cic.cmd.wizard as cmd_wizard import cic.cmd.ext as cmd_ext
from cic.config import ensure_base_configs import cic.cmd.export as cmd_export
import cic.cmd.easy as cmd_easy
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
script_dir = os.path.dirname(os.path.realpath(__file__)) script_dir = os.path.dirname(os.path.realpath(__file__))
data_dir = os.path.join(script_dir, "..", "data") data_dir = os.path.join(script_dir, '..', 'data')
base_config_dir = os.path.join(data_dir, "config") base_config_dir = os.path.join(data_dir, 'config')
schema_dir = os.path.join(script_dir, "..", "schema") schema_dir = os.path.join(script_dir, '..', 'schema')
user_config_dir = os.path.join(
os.path.expanduser("~"), ".config", "cic", "cli", "config"
)
arg_flags = chainlib.cli.argflag_std_read | chainlib.cli.Flag.SEQ arg_flags = chainlib.cli.argflag_std_read | chainlib.cli.Flag.SEQ
argparser = chainlib.cli.ArgumentParser( argparser = chainlib.cli.ArgumentParser(env=os.environ, arg_flags=arg_flags, description='CIC cli tool for generating and publishing tokens')
env=os.environ,
arg_flags=arg_flags,
description="CIC cli tool for generating and publishing contracts",
)
sub = argparser.add_subparsers() sub = argparser.add_subparsers()
sub.dest = "command" sub.dest = 'command'
sub_init = sub.add_parser('init', help='initialize new cic data directory')
sub_init = sub.add_parser("init", help="initialize new cic data directory")
cmd_init.process_args(sub_init) cmd_init.process_args(sub_init)
sub_show = sub.add_parser('show', help='display summary of current state of cic data directory')
sub_show = sub.add_parser(
"show", help="display summary of current state of cic data directory"
)
cmd_show.process_args(sub_show) cmd_show.process_args(sub_show)
sub_export = sub.add_parser('export', help='export cic data directory state to a specified target')
sub_export = sub.add_parser(
"export", help="export cic data directory state to a specified target"
)
cmd_export.process_args(sub_export) cmd_export.process_args(sub_export)
sub_ext = sub.add_parser('ext', help='extension helpers')
sub_ext = sub.add_parser("ext", help="extension helpers")
cmd_ext.process_args(sub_ext) cmd_ext.process_args(sub_ext)
sub_wizard = sub.add_parser( sub_easy = sub.add_parser('easy', help='Easy Mode Contract Deployment')
"wizard", help="An interactive wizard for creating and publishing contracts" cmd_easy.process_args(sub_easy)
)
cmd_wizard.process_args(sub_wizard)
args = argparser.parse_args(sys.argv[1:]) args = argparser.parse_args(sys.argv[1:])
if args.command is None: if args.command == None:
logg.critical("Subcommand missing") logg.critical('Subcommand missing')
sys.stderr.write("\033[;91m" + "subcommand missing" + "\033[;39m\n") sys.stderr.write("\033[;91m" + 'subcommand missing' + "\033[;39m\n")
argparser.print_help(sys.stderr)
sys.exit(1) sys.exit(1)
modname = f"cic.cmd.{args.command}" modname = 'cic.cmd.{}'.format(args.command)
logg.debug(f"using module {modname}") logg.debug('using module {}'.format(modname))
cmd_mod = importlib.import_module(modname) cmd_mod = importlib.import_module(modname)
extra_args = { extra_args = {
"p": "RPC_PROVIDER", 'p': 'RPC_PROVIDER',
} }
ensure_base_configs(user_config_dir) config = chainlib.cli.Config.from_args(args, arg_flags=arg_flags, base_config_dir=base_config_dir, extra_args=extra_args)
def main(): def main():
default_config_dir = args.config or os.path.join(user_config_dir, "mainnet")
config = chainlib.cli.Config.from_args(
args,
arg_flags=arg_flags,
base_config_dir=base_config_dir,
extra_args=extra_args,
default_config_dir=default_config_dir,
)
try: try:
cmd_mod.execute(config, args) cmd_mod.execute(config, args)
except Exception as e: except Exception as e:
logg.exception(e) logg.exception(e) #'{}'.format(e))
sys.stderr.write("\033[;91m" + str(e) + "\033[;39m\n") sys.stderr.write("\033[;91m" + str(e) + "\033[;39m\n")
sys.exit(1) sys.exit(1)
if __name__ == '__main__':
if __name__ == "__main__":
main() main()

19
cic/runnable/test_cmd.py Normal file

@ -0,0 +1,19 @@
# standard imports
import sys
import logging
# local imports
from cic.cmd import CmdCtrl
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()
ctrl = CmdCtrl(argv=sys.argv[1:], logger=logg)
def main():
ctrl.execute()
if __name__ == '__main__':
main()

View File

@ -3,8 +3,7 @@ import json
import os import os
# local imports # local imports
from cic.contract.base import Data, data_dir from .base import Data, data_dir
from cic.contract.helpers import select_contract
class Token(Data): class Token(Data):
@ -29,18 +28,17 @@ class Token(Data):
def __init__( def __init__(
self, self,
path=".", path=".",
name="Foo Token", name=None,
symbol="FOO", symbol=None,
precision=6, precision=1,
supply=0, supply=0,
code=None, code=None,
extra_args=[], extra_args=[],
extra_args_types=[], extra_args_types=[],
interactive=False,
): ):
super(Token, self).__init__() super(Token, self).__init__()
self.name = name self.name = name
self.symbol = symbol.upper() self.symbol = symbol
self.supply = supply self.supply = supply
self.precision = precision self.precision = precision
self.code = code self.code = code
@ -49,27 +47,16 @@ class Token(Data):
self.path = path self.path = path
self.token_path = os.path.join(self.path, "token.json") self.token_path = os.path.join(self.path, "token.json")
if interactive:
contract = select_contract()
self.code = contract["bin_path"]
self.extra_args = contract["extra_args"]
self.extra_args_types = contract["extra_args_types"]
self.name = input(f"Enter Token Name ({self.name}): ") or self.name
self.symbol = input(f"Enter Token Symbol ({self.symbol}): ") or self.symbol
self.symbol = self.symbol.upper()
self.precision = input(f"Enter Token Precision ({self.precision}): ") or self.precision
self.supply = input(f"Enter Token Supply ({self.supply}): ") or self.supply
def load(self): def load(self):
"""Load token data from settings.""" """Load token data from settings."""
super(Token, self).load() super(Token, self).load()
with open(self.token_path, "r", encoding="utf-8") as f: f = open(self.token_path, "r")
o = json.load(f) o = json.load(f)
f.close()
self.name = o["name"] self.name = o["name"]
self.symbol = o["symbol"].upper() self.symbol = o["symbol"]
self.precision = o["precision"] self.precision = o["precision"]
self.code = o["code"] self.code = o["code"]
self.supply = o["supply"] self.supply = o["supply"]
@ -77,16 +64,13 @@ class Token(Data):
extra_types = [] extra_types = []
token_extras: list = o["extra"] token_extras: list = o["extra"]
if token_extras: if token_extras:
for idx, token_extra in enumerate(token_extras): for token_extra in token_extras:
arg = token_extra.get("arg") arg = token_extra.get("arg")
arg_type = token_extra.get("arg_type") arg_type = token_extra.get("arg_type")
if arg and arg_type: if arg:
extras.append(arg) extras.append(arg)
if arg_type:
extra_types.append(arg_type) extra_types.append(arg_type)
elif (arg and not arg_type) or (not arg and arg_type):
raise ValueError(
f"Extra contract args must have a 'arg' and 'arg_type', Please check {self.token_path}:extra[{idx}] "
)
self.extra_args = extras self.extra_args = extras
self.extra_args_types = extra_types self.extra_args_types = extra_types
self.inited = True self.inited = True
@ -96,27 +80,33 @@ class Token(Data):
super(Token, self).load() super(Token, self).load()
token_template_file_path = os.path.join( token_template_file_path = os.path.join(
data_dir, f"token_template_v{self.version()}.json" data_dir, "token_template_v{}.json".format(self.version())
) )
with open(token_template_file_path, encoding="utf-8") as f:
o = json.load(f) f = open(token_template_file_path)
o = json.load(f)
f.close()
o["name"] = self.name o["name"] = self.name
o["symbol"] = self.symbol.upper() o["symbol"] = self.symbol
o["precision"] = self.precision o["precision"] = self.precision
o["code"] = self.code o["code"] = self.code
o["supply"] = self.supply o["supply"] = self.supply
extra = [] extra = []
for idx, extra_arg in enumerate(self.extra_args): for i in range(len(self.extra_args)):
extra.append({"arg": extra_arg, "arg_type": self.extra_args_types[idx]}) extra.append(
if len(extra) != 0: {"arg": self.extra_args[i], "arg_type": self.extra_args_types[i]}
)
if len(extra):
o["extra"] = extra o["extra"] = extra
print(extra)
with open(self.token_path, "w", encoding="utf-8") as f: f = open(self.token_path, "w")
json.dump(o, f, sort_keys=True, indent="\t") json.dump(o, f, sort_keys=True, indent="\t")
f.close()
def __str__(self): def __str__(self):
s = f"name = {self.name}\n" s = f"name = {self.name}\n"
s += f"symbol = {self.symbol.upper()}\n" s += f"symbol = {self.symbol}\n"
s += f"precision = {self.precision}\n" s += f"precision = {self.precision}\n"
s += f"supply = {self.supply}\n" s += f"supply = {self.supply}\n"
for idx, extra in enumerate(self.extra_args): for idx, extra in enumerate(self.extra_args):
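The token.json handled above carries the deployment parameters plus an optional list of extra constructor arguments. A hand-written sketch of a file that load() would accept; the demurrage-style extra args mirror the CSV test fixture at the end of this diff, and the bytecode path is illustrative:

import json

token = {
    "name": "Foo Token",
    "symbol": "FOO",
    "precision": 6,
    "supply": 0,
    "code": "./Bondi.bin",   # path to the compiled token bytecode (illustrative)
    "extra": [
        {"arg": "46124891913883000000000000000000", "arg_type": "uint256"},
    ],
}

with open("token.json", "w", encoding="utf-8") as f:
    json.dump(token, f, sort_keys=True, indent="\t")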

View File

@ -4,15 +4,15 @@ def object_to_str(obj, keys):
for key in keys: for key in keys:
value = eval("obj." + key) value = eval("obj." + key)
key = key.replace("()", "") key = key.replace("()", "")
if isinstance(value, str): if type(value) == str:
s += f"{key} = {value}\n" s += f"{key} = {value}\n"
elif isinstance(value, list): elif type(value) == list:
for idx, vv in enumerate(value): for idx, vv in enumerate(value):
if not vv: if not vv:
s += f"{key}[{idx}] = \n" s += f"{key}[{idx}] = \n"
continue continue
s += f"{key}[{idx}] = {vv}\n" s += f"{key}[{idx}] = {vv}\n"
elif isinstance(value, dict): elif type(value) == dict:
for vv_key in value.keys(): for vv_key in value.keys():
vv_value = value[vv_key] vv_value = value[vv_key]
if not vv_value: if not vv_value:

View File

@ -1,125 +0,0 @@
# standard imports
import base64
import json
import logging
import os
import sys
import urllib.request
from typing import Dict, Type, Union
from cic_types.ext.metadata import MetadataPointer, MetadataRequestsHandler
logg = logging.getLogger(__name__)
class OutputWriter:
def __init__(self, *args, **kwargs):
pass
def write(self, k, v):
raise NotImplementedError()
class StdoutWriter(OutputWriter):
def write(self, k, v):
sys.stdout.write(f"{k}\t{v}\n")
class KVWriter(OutputWriter):
def __init__(self, path=None, *args, **kwargs):
try:
os.stat(path)
except FileNotFoundError:
os.makedirs(path)
self.path = path
super().__init__(*args, **kwargs)
def write(self, k, v):
fp = os.path.join(self.path, str(k))
logg.debug(f"path write {fp} {str(v)}")
f = open(fp, "wb")
f.write(v)
f.close()
class HTTPWriter(OutputWriter):
def __init__(self, path=None, headers: Dict[str, str] = None, *args, **kwargs):
super(HTTPWriter, self).__init__(*args, **kwargs)
self.path = path
self.headers = headers
def write(self, k, v):
path = self.path
if k is not None:
path = os.path.join(path, k)
logg.debug(f"HTTPWriter POST {path} data: {v}, headers: {self.headers}")
rq = urllib.request.Request(path, method="POST", data=v, headers=self.headers)
r = urllib.request.urlopen(rq)
logg.info(f"http writer submitted at {r.read()}")
class KeyedWriter(OutputWriter):
def __init__(self, writer_keyed, writer_immutable):
self.writer_keyed = writer_keyed
self.writer_immutable = writer_immutable
super().__init__()
def write(self, k, v):
logg.debug(f"writing keywriter key: {k} value: {v}")
if isinstance(v, str):
v = v.encode("utf-8")
if self.writer_keyed is not None:
self.writer_keyed.write(k, v)
if self.writer_immutable is not None:
self.writer_immutable.write(None, v)
class KeyedWriterFactory:
def __init__(
self, key_writer_constructor, immutable_writer_constructor, *_args, **kwargs
):
self.key_writer_constructor = key_writer_constructor
self.immutable_writer_constructor = immutable_writer_constructor
self.x = {}
for k, v in kwargs.items():
logg.debug(f"adding key {k} t keyed writer factory")
self.x[k] = v
def new(self, path=None, headers: Dict[str, str] = None, *_args, **_kwargs):
writer_keyed = None
writer_immutable = None
if self.key_writer_constructor is not None:
writer_keyed = self.key_writer_constructor(path, **self.x)
if self.immutable_writer_constructor is not None:
writer_immutable = self.immutable_writer_constructor(
path, headers, **self.x
)
return KeyedWriter(writer_keyed, writer_immutable)
class MetadataWriter(OutputWriter):
"""Custom writer for publishing data under immutable content-addressed pointers in the cic-meta storage backend.
Data that is not utf-8 will be converted to base64 before publishing.
Implements cic.writers.OutputWriter
"""
def write(self, k, v):
rq = MetadataRequestsHandler(MetadataPointer.NONE, bytes.fromhex(k))
try:
v = v.decode("utf-8")
v = json.loads(v)
logg.debug(f"metadatawriter bindecode {k} {v}")
except UnicodeDecodeError:
v = base64.b64encode(v).decode("utf-8")
v = json.loads(json.dumps(v, separators=(",", ":")))
logg.debug(f"metadatawriter b64encode {k} {v}")
r = rq.create(v)
logg.info(f"metadata submitted at {k}")
return r
WritersType = Union[
Type[OutputWriter], Type[KeyedWriter], Type[MetadataWriter], Type[OutputWriter]
]

7
eth_requirements.txt Normal file

@ -0,0 +1,7 @@
chainlib-eth~=0.0.21
funga-eth~=0.5.1
eth-token-index~=0.2.4
eth-address-index~=0.2.4
okota~=0.2.5a1
cic_eth_registry~=0.6.2
cic_contracts~=0.0.5

2920
poetry.lock generated

File diff suppressed because it is too large

View File

@ -1,99 +0,0 @@
[tool.poetry]
name = "cic-cli"
version = "0.5.5"
description = "Generic cli tooling for the CIC token network"
authors = [
"Louis Holbrook <dev@holbrook.no>",
"William Luke <williamluke4@gmail.com>",
]
license = "GPL-3.0-or-later"
readme = "README.md"
repository = "https://git.grassecon.net/cicnet/cic-cli"
classifiers = [
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
"Topic :: Internet",
]
keywords = ["dlt", "blockchain", "cryptocurrency"]
packages = [
{ include = "cic" },
{ include = "cic/runnable/*.py" },
{ include = "cic/ext/**/*.py" },
{ include = "cic/cmd/**/*.py" },
]
[tool.poetry.scripts]
cic = 'cic.runnable.cic_cmd:main'
[[tool.poetry.source]]
name = "grassroots_"
url = "https://pip.grassrootseconomics.net/"
default = false
secondary = true
[[tool.poetry.source]]
name = "pypi_"
url = "https://pypi.org/simple/"
default = true
secondary = false
[tool.poetry.dependencies]
python = "^3.8"
funga-eth = "^0.6.0"
cic-types = "^0.2.7"
confini = "^0.6.0"
chainlib = "~0.1.0"
cbor2 = "~5.4.1"
chainlib-eth = { version = "~0.1.1", optional = true }
eth-token-index = { version = "^0.3.0", optional = true }
eth-address-index = { version = "~0.5.0", optional = true }
okota = { version = "^0.4.0", optional = true }
cic-eth-registry = { version = "^0.6.9", optional = true }
cic-contracts = { version = "~0.1.0", optional = true }
[tool.poetry.dev-dependencies]
pytest = "6.2.5"
pytest-cov = "2.10.1"
python-semantic-release = "^7.25.2"
pylint = "^2.12.2"
black = { version = "^22.1.0", allow-prereleases = true }
eth_tester = "0.5.0b3"
py-evm = "0.3.0a20"
rlp = "2.0.1"
mypy = "^0.961"
[tool.poetry.extras]
eth = [
"chainlib-eth",
"eth-token-index",
"eth-address-index",
"okota",
"cic_eth_registry",
"cic_contracts",
]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.pytest.ini_options]
addopts = "--cov=cic --cov-report term-missing -v"
testpaths = ["tests"]
[tool.semantic_release]
version_variable = ["cic/__init__.py:__version__", "pyproject.toml:version"]
version_source = "commit"
branch = "master"
upload_to_repository = true
upload_to_release = true
build_command = "pip install poetry && poetry build"
hvcs = "gitea"
hvcs_domain = "git.grassecon.net"
check_build_status = false

6
requirements.txt Normal file

@ -0,0 +1,6 @@
funga-eth~=0.5.1
cic-types~=0.2.1a8
confini~=0.5.3
chainlib~=0.0.17
cbor2==5.4.1
usumbufu==0.3.6

17
run_tests.sh Normal file

@ -0,0 +1,17 @@
#!/bin/bash
set -a
set -e
set -x
default_pythonpath=$PYTHONPATH:.
export PYTHONPATH=${default_pythonpath:-.}
>&2 echo using pythonpath $PYTHONPATH
for f in `ls tests/*.py`; do
python $f
done
for f in `ls tests/eth/*.py`; do
python $f
done
set +x
set +e
set +a

32
setup.cfg Normal file

@ -0,0 +1,32 @@
[metadata]
name = cic
version = 0.0.2
description = Generic cli tooling for the CIC token network
author = Louis Holbrook
author_email = dev@holbrook.no
url = https://git.grassecon.net/cic-cli.git
keywords =
dlt
blockchain
cryptocurrency
classifiers =
Programming Language :: Python :: 3
Operating System :: OS Independent
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: Developers
License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
Topic :: Internet
license = GPL3
licence_files =
LICENSE.txt
[options]
python_requires = >= 3.8
include_package_data = True
packages =
cic
cic.runnable
cic.ext.eth
cic.cmd

35
setup.py Normal file

@ -0,0 +1,35 @@
from setuptools import setup
import configparser
import os
requirements = []
f = open('requirements.txt', 'r')
while True:
l = f.readline()
if l == '':
break
requirements.append(l.rstrip())
f.close()
eth_requirements = []
f = open('eth_requirements.txt', 'r')
while True:
l = f.readline()
if l == '':
break
eth_requirements.append(l.rstrip())
f.close()
setup(
install_requires=requirements,
extras_require={
'eth': eth_requirements,
},
entry_points={
'console_scripts': [
'cic-cli=cic.runnable.cic_cmd:main',
],
},
)

7
test_requirements.txt Normal file

@ -0,0 +1,7 @@
eth-erc20>=0.1.2a3,<0.2.0
eth_tester==0.5.0b3
py-evm==0.3.0a20
rlp==2.0.1
chainlib-eth>=0.0.10a2,<0.1.0
eth-address-index>=0.2.4a1,<0.3.0
okota>=0.2.4a6,<0.3.0

View File

View File

@ -1,41 +1,43 @@
# standard imports # standard imports
import os import os
import random
import tempfile import tempfile
import logging
import unittest import unittest
import random
# external imports # external imports
from hexathon import add_0x from hexathon import add_0x
# local imports # local imports
from cic.writers import KVWriter from cic.output import KVWriter
from cic.contract.components.attachment import Attachment from cic.processor import Processor
from cic.contract.components.proof import Proof from cic.attachment import Attachment
from cic.contract.processor import ContractProcessor from cic import Proof
test_base_dir = os.path.dirname(os.path.realpath(__file__)) test_base_dir = os.path.dirname(os.path.realpath(__file__))
test_data_dir = os.path.join(test_base_dir, "testdata") test_data_dir = os.path.join(test_base_dir, 'testdata')
proof_hash = "0f6fc017f29caf512c0feaaf83bc10614b488311cace2973dc248dc24b01e04f" proof_hash = '0f6fc017f29caf512c0feaaf83bc10614b488311cace2973dc248dc24b01e04f'
foo_hash = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae" foo_hash = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'
bar_hash = "fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9" bar_hash = 'fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9'
root_merged_hash = "2a27a488377c753fffea58ad535cfdacc2fcb5cf0ae495ec71d88e31757ec0c3" root_merged_hash = '795fed550ada0ec1eea4309a282f5910bc3bdb3a9762c7d9cc25d6de71c45096'
root_unmerged_hash = "14dc271290eca763e99c2e7c21c541bded86fb803c6b01bac28cd367db34399c" root_unmerged_hash = '5dc81e51703e624f498663e7d5d70429b824e9ff60f92b61fe47eb6862a971b4'
class TestCICBase(unittest.TestCase): class TestCICBase(unittest.TestCase):
def setUp(self): def setUp(self):
super(TestCICBase, self).setUp() super(TestCICBase, self).setUp()
random.seed(42) random.seed(42)
f = open("/dev/urandom", "rb") f = open('/dev/urandom', 'rb')
addresses = [] addresses = []
for _i in range(3): for i in range(3):
address_bytes = f.read(32) address_bytes = f.read(32)
addresses.append(add_0x(address_bytes.hex())) addresses.append(add_0x(address_bytes.hex()))
self.token_symbol = "FOO" self.token_symbol = 'FOO'
token_address_bytes = f.read(20) token_address_bytes = f.read(20)
token_index_address_bytes = f.read(20) token_index_address_bytes = f.read(20)
address_declarator_address_bytes = f.read(20) address_declarator_address_bytes = f.read(20)
@ -48,23 +50,23 @@ class TestCICBase(unittest.TestCase):
self.outputs_dir = tempfile.mkdtemp() self.outputs_dir = tempfile.mkdtemp()
self.outputs_writer = KVWriter(self.outputs_dir) self.outputs_writer = KVWriter(self.outputs_dir)
self.core_processor = ContractProcessor(outputs_writer=self.outputs_writer) self.core_processor = Processor(outputs_writer=self.outputs_writer)
self.resources = { self.resources = {
"token": { 'token': {
"reference": self.token_address, 'reference': self.token_address,
"key_address": addresses[0], 'key_address': addresses[0],
}, },
"token_index": { 'token_index': {
"reference": self.token_index_address, 'reference': self.token_index_address,
"key_address": addresses[1], 'key_address': addresses[1],
}, },
"address_declarator": { 'address_declarator': {
"reference": self.address_declarator_address, 'reference': self.address_declarator_address,
"key_address": addresses[2], 'key_address': addresses[2],
}, },
} }
proof_dir = os.path.join(test_data_dir, "proof") proof_dir = os.path.join(test_data_dir, 'proof')
attach = Attachment(path=proof_dir) attach = Attachment(path=proof_dir)
attach.load() attach.load()
self.proofs = Proof(proof_dir, attachments=attach) self.proofs = Proof(proof_dir, attachments=attach)

View File

@ -1,4 +1,4 @@
# standard imports # standard imports import unittest import logging
import random import random
import os import os
import logging import logging
@ -24,10 +24,10 @@ from cic_contracts.writer import CICWriter
# local imports # local imports
from cic.ext.eth import CICEth from cic.ext.eth import CICEth
from cic.writers import KVWriter from cic import Proof
from cic.contract.processor import ContractProcessor from cic.attachment import Attachment
from cic.contract.components.proof import Proof from cic.output import KVWriter
from cic.contract.components.attachment import Attachment from cic.processor import Processor
# test imports # test imports
@ -127,4 +127,4 @@ class TestCICEthTokenBase(TestCICEthBase):
self.token_precision = 8 self.token_precision = 8
self.token_supply = 1073741824 self.token_supply = 1073741824
self.core_processor = ContractProcessor(outputs_writer=self.outputs_writer, extensions=[self.adapter]) self.core_processor = Processor(outputs_writer=self.outputs_writer, extensions=[self.adapter])

View File

@ -27,8 +27,8 @@ from giftable_erc20_token import GiftableToken
# local imports # local imports
from cic.ext.eth import CICEth from cic.ext.eth import CICEth
from cic.contract.processor import ContractProcessor from cic.processor import Processor
from cic.contract.components.token import Token from cic.token import Token
# test imports # test imports
from tests.eth.base_eth import TestCICEthTokenBase from tests.eth.base_eth import TestCICEthTokenBase
@ -46,7 +46,7 @@ class TestCICEthRPC(TestCICEthTokenBase):
gas_oracle = RPCGasOracle(self.rpc) gas_oracle = RPCGasOracle(self.rpc)
self.adapter = CICEth(self.chain_spec, self.resources, self.proofs, signer=self.signer, rpc=self.rpc, fee_oracle=gas_oracle, outputs_writer=self.outputs_writer) self.adapter = CICEth(self.chain_spec, self.resources, self.proofs, signer=self.signer, rpc=self.rpc, fee_oracle=gas_oracle, outputs_writer=self.outputs_writer)
self.core_processor = ContractProcessor(outputs_writer=self.outputs_writer, extensions=[self.adapter]) self.core_processor = Processor(outputs_writer=self.outputs_writer, extensions=[self.adapter])
def test_rpc_process_notoken(self): def test_rpc_process_notoken(self):

View File

@ -5,7 +5,7 @@ import os
# local imports # local imports
from cic.ext.eth import CICEth from cic.ext.eth import CICEth
from cic.contract.processor import ContractProcessor from cic.processor import Processor
# tests imports # tests imports
from tests.eth.base_eth import TestCICEthBase from tests.eth.base_eth import TestCICEthBase

View File

@ -11,7 +11,7 @@ from hexathon import (
# local imports # local imports
from cic.ext.eth import CICEth from cic.ext.eth import CICEth
from cic.contract.processor import ContractProcessor from cic.processor import Processor
# tests imports # tests imports
from tests.eth.base_eth import TestCICEthBase from tests.eth.base_eth import TestCICEthBase
@ -25,7 +25,7 @@ class TestCICEthSign(TestCICEthBase):
def setUp(self): def setUp(self):
super(TestCICEthSign, self).setUp() super(TestCICEthSign, self).setUp()
self.adapter = CICEth(self.chain_spec, self.resources, self.proofs, signer=self.signer) self.adapter = CICEth(self.chain_spec, self.resources, self.proofs, signer=self.signer)
self.core_processor = ContractProcessor(outputs_writer=self.outputs_writer, extensions=[self.adapter]) self.core_processor = Processor(outputs_writer=self.outputs_writer, extensions=[self.adapter])
def test_sign_token_index(self): def test_sign_token_index(self):

View File

@ -1,138 +0,0 @@
import os
import tempfile
import pytest
from cic.contract.csv import load_contract_from_csv
from tests.base_cic import test_data_dir
@pytest.mark.skip(reason="Public RPC is currently dead")
def test_csv_generate_demurrage():
outputs_dir = os.path.join(tempfile.mkdtemp(), "outputs")
test_csv_path = os.path.join(test_data_dir, "voucher", "bondi.csv")
contract = load_contract_from_csv(
{
"WALLET_KEY_FILE": os.path.join(test_data_dir, "keystore", "ok"),
"WALLET_PASSPHRASE": "test",
"CHAIN_SPEC": "evm:kitabu:6060:sarafu",
"RPC_PROVIDER": "http://142.93.38.53:8545",
"CIC_REGISTRY_ADDRESS": "0xe3e3431BF25b06166513019Ed7B21598D27d05dC",
},
outputs_dir,
csv_path=test_csv_path,
)
# assert len(contracts) == 1
# contract = contracts[0]
# Token
assert contract.token.name == "Bondeni"
assert contract.token.extra_args == [
"46124891913883000000000000000000",
"1440",
"0xB8830b647C01433F9492F315ddBFDc35CB3Be6A6",
]
assert contract.token.extra_args_types == ["uint256", "uint256", "address"]
# assert contract.token.code == os.path.join(test_data_dir, "contracts", "Bondi.bin")
assert contract.token.precision == '6'
assert contract.token.supply == "5025"
assert contract.token.symbol == "BONDE"
# Meta
assert contract.meta.country_code == "KE"
assert contract.meta.location == "Mutitu Kilifi"
assert contract.meta.contact == {
"email": "info@grassecon.org",
"phone": "254797782065",
}
assert contract.meta.name == "Bondeni SHG"
# Network
assert contract.network.resources["eth"]["chain_spec"] == {
"arch": "evm",
"common_name": "sarafu",
"custom": [],
"extra": {},
"fork": "kitabu",
"network_id": 6060,
}
assert contract.network.resources["eth"]["contents"] == {
"address_declarator": {
"key_account": "cc4f82f5dacde395e1e0cfc4d62827c8b8b5688c",
"reference": "f055e83f713DbFF947e923749Af9802eaffFB5f9",
},
"token": {
"key_account": "cc4f82f5dacde395e1e0cfc4d62827c8b8b5688c",
"reference": None,
},
"token_index": {
"key_account": "cc4f82f5dacde395e1e0cfc4d62827c8b8b5688c",
"reference": "5A1EB529438D8b3cA943A45a48744f4c73d1f098",
},
}
assert contract.proof.description == "1 BONDE = 1 itumbe"
assert contract.proof.namespace == "ge"
assert contract.proof.proofs == []
assert contract.proof.version() == 0
@pytest.mark.skip(reason="Public RPC is currently dead")
def test_csv_generate_giftable():
outputs_dir = os.path.join(tempfile.mkdtemp(), "outputs")
test_csv_path = os.path.join(test_data_dir, "voucher", "bondi_giftable.csv")
contract = load_contract_from_csv(
{
"WALLET_KEY_FILE": os.path.join(test_data_dir, "keystore", "ok"),
"WALLET_PASSPHRASE": "test",
"CHAIN_SPEC": "evm:kitabu:6060:sarafu",
"RPC_PROVIDER": "http://142.93.38.53:8545",
"CIC_REGISTRY_ADDRESS": "0xe3e3431BF25b06166513019Ed7B21598D27d05dC",
},
outputs_dir,
csv_path=test_csv_path,
)
# assert len(contracts) == 1
# contract = contracts[0]
# Token
assert contract.token.name == "Bondeni"
assert contract.token.extra_args == []
assert contract.token.extra_args_types == []
# assert contract.token.code == os.path.join(test_data_dir, "contracts", "Bondi.bin")
assert contract.token.precision == '6'
assert contract.token.supply == "5025"
assert contract.token.symbol == "BONDE"
# Meta
assert contract.meta.country_code == "KE"
assert contract.meta.location == "Mutitu Kilifi"
assert contract.meta.contact == {
"email": "info@grassecon.org",
"phone": "254797782065",
}
assert contract.meta.name == "Bondeni SHG"
# Network
assert contract.network.resources["eth"]["chain_spec"] == {
"arch": "evm",
"common_name": "sarafu",
"custom": [],
"extra": {},
"fork": "kitabu",
"network_id": 6060,
}
assert contract.network.resources["eth"]["contents"] == {
"address_declarator": {
"key_account": "cc4f82f5dacde395e1e0cfc4d62827c8b8b5688c",
"reference": "f055e83f713DbFF947e923749Af9802eaffFB5f9",
},
"token": {
"key_account": "cc4f82f5dacde395e1e0cfc4d62827c8b8b5688c",
"reference": None,
},
"token_index": {
"key_account": "cc4f82f5dacde395e1e0cfc4d62827c8b8b5688c",
"reference": "5A1EB529438D8b3cA943A45a48744f4c73d1f098",
},
}
assert contract.proof.description == "1 BONDE = 1 itumbe"
assert contract.proof.namespace == "ge"
assert contract.proof.proofs == []
assert contract.proof.version() == 0

View File

@ -1,26 +1,23 @@
# standard imports # standard imports
import logging
import os import os
import unittest import unittest
import logging
# local imports # local imports
from funga.error import DecryptError
from funga.eth.keystore.dict import DictKeystore
from hexathon import uniform as hex_uniform
# external imports
from cic.keystore import KeystoreDirectory from cic.keystore import KeystoreDirectory
from funga.eth.keystore.dict import DictKeystore
from funga.error import DecryptError
from hexathon import uniform as hex_uniform
# test imports # test imports
from tests.base_cic import test_base_dir from tests.base_cic import test_base_dir
log = logging.getLogger(__name__) logging = logging.getLogger()
script_dir = test_base_dir script_dir = test_base_dir
def pass_getter(): def pass_getter():
return "test" return 'test'
class EthKeystoreDirectory(DictKeystore, KeystoreDirectory): class EthKeystoreDirectory(DictKeystore, KeystoreDirectory):
@ -28,25 +25,25 @@ class EthKeystoreDirectory(DictKeystore, KeystoreDirectory):
class TestKeyfile(unittest.TestCase): class TestKeyfile(unittest.TestCase):
def setUp(self): def setUp(self):
self.path = os.path.join(script_dir, "testdata", "keystore") self.path = os.path.join(script_dir, 'testdata', 'keystore')
self.keystore = EthKeystoreDirectory() self.keystore = EthKeystoreDirectory()
def test_keystore_bogus(self): def test_keystore_bogus(self):
bogus_path = os.path.join(self.path, "bogus") bogus_path = os.path.join(self.path, 'bogus')
self.keystore.process_dir(bogus_path) self.keystore.process_dir(bogus_path)
def test_keystore_ok(self): def test_keystore_ok(self):
ok_path = os.path.join(self.path, "ok") ok_path = os.path.join(self.path, 'ok')
with self.assertRaises(DecryptError): with self.assertRaises(DecryptError):
self.keystore.process_dir(ok_path) # wrong password self.keystore.process_dir(ok_path) # wrong password
self.keystore.process_dir(ok_path, default_password="test") self.keystore.process_dir(ok_path, default_password='test')
self.keystore.process_dir(ok_path, password_retriever=pass_getter) self.keystore.process_dir(ok_path, password_retriever=pass_getter)
self.assertTrue( self.assertTrue(hex_uniform('cc4f82F5DacDE395E1E0CFc4d62827C8B8B5688C') in self.keystore.list())
hex_uniform("cc4f82F5DacDE395E1E0CFc4d62827C8B8B5688C")
in self.keystore.list()
)
if __name__ == "__main__": if __name__ == '__main__':
unittest.main() unittest.main()

View File

@ -1,14 +1,14 @@
# standard imports # standard imports
import unittest
import logging import logging
import os import os
import unittest
# local imports
from cic.meta import Meta
# external imports # external imports
from hexathon import strip_0x from hexathon import strip_0x
# local imports
from cic.contract.components.meta import Meta
# test imports # test imports
from tests.base_cic import TestCICBase, test_data_dir from tests.base_cic import TestCICBase, test_data_dir

View File

@ -7,7 +7,7 @@ import logging
from hexathon import strip_0x from hexathon import strip_0x
# local imports # local imports
from cic.writers import KVWriter from cic.output import KVWriter
# test imports # test imports
from tests.base_cic import TestCICBase from tests.base_cic import TestCICBase

View File

@ -3,12 +3,17 @@ import unittest
import logging import logging
import os import os
import json import json
import sys
# external imports
from hexathon import strip_0x
# local imports # local imports
from cic.contract.processor import ContractProcessor from cic import Proof
from cic.contract.components.proof import Proof from cic.processor import Processor
from cic.contract.components.attachment import Attachment from cic.attachment import Attachment
from cic.contract.components.meta import Meta from cic.meta import Meta
from cic.output import KVWriter
# test imports # test imports
from tests.base_cic import ( from tests.base_cic import (
@ -23,65 +28,66 @@ logg.setLevel(logging.DEBUG)
class MockExt: class MockExt:
def __init__(self, address): def __init__(self, address):
self.address = address self.address = address
def process(self): def process(self):
return (self.address, "foo") return (self.address, 'foo')
class TestCICProcessor(TestCICBase): class TestCICProcessor(TestCICBase):
def test_processor_meta(self): def test_processor_meta(self):
fp = os.path.join(test_data_dir, "proof") fp = os.path.join(test_data_dir, 'proof')
m = Meta(fp) m = Meta(fp)
m.load() m.load()
mock_ext = MockExt(self.token_address) mock_ext = MockExt(self.token_address)
p = ContractProcessor( p = Processor(metadata=m, outputs_writer=self.outputs_writer, extensions=[mock_ext])
metadata=m, outputs_writer=self.outputs_writer, extensions=[mock_ext]
)
p.token_address = self.token_address p.token_address = self.token_address
p.process() p.process()
meta_reference = m.reference(self.token_address) meta_reference = m.reference(self.token_address)
fp = os.path.join(self.outputs_dir, meta_reference) fp = os.path.join(self.outputs_dir, meta_reference)
with open(fp, "r", encoding="utf-8") as f: f = open(fp, 'r')
o = json.load(f) o = json.load(f)
f.close()
self.assertEqual(m.asdict(), o) self.assertEqual(m.asdict(), o)
def test_processor_attachment(self): def test_processor_attachment(self):
fp = os.path.join(test_data_dir, "proof") fp = os.path.join(test_data_dir, 'proof')
m = Attachment(fp) m = Attachment(fp)
m.load() m.load()
mock_ext = MockExt(self.token_address) mock_ext = MockExt(self.token_address)
p = ContractProcessor( p = Processor(attachment=m, outputs_writer=self.outputs_writer, extensions=[mock_ext])
attachment=m, outputs_writer=self.outputs_writer, extensions=[mock_ext]
)
p.process() p.process()
for _k in list(m.contents.keys()):
for k in list(m.contents.keys()):
os.stat(fp) os.stat(fp)
def test_processor_proof_noattachment(self): def test_processor_proof_noattachment(self):
fp = os.path.join(test_data_dir, "proof") fp = os.path.join(test_data_dir, 'proof')
m = Proof(fp) m = Proof(fp)
ap = os.path.join(test_data_dir, "proof_empty") ap = os.path.join(test_data_dir, 'proof_empty')
m.extra_attachments = Attachment(ap) m.extra_attachments = Attachment(ap)
m.load() m.load()
mock_ext = MockExt(self.token_address) mock_ext = MockExt(self.token_address)
p = ContractProcessor( p = Processor(proof=m, outputs_writer=self.outputs_writer, extensions=[mock_ext])
proof=m, outputs_writer=self.outputs_writer, extensions=[mock_ext]
)
p.process() p.process()
self.assertEqual(p.outputs[0], root_unmerged_hash) self.assertEqual(p.outputs[0], root_unmerged_hash)
def test_processor_proof_attachment(self): def test_processor_proof_attachment(self):
fp = os.path.join(test_data_dir, "proof") fp = os.path.join(test_data_dir, 'proof')
ma = Attachment(fp) ma = Attachment(fp)
ma.load() ma.load()
@ -90,13 +96,11 @@ class TestCICProcessor(TestCICBase):
mp.load() mp.load()
mock_ext = MockExt(self.token_address) mock_ext = MockExt(self.token_address)
p = ContractProcessor( p = Processor(proof=mp, outputs_writer=self.outputs_writer, extensions=[mock_ext])
proof=mp, outputs_writer=self.outputs_writer, extensions=[mock_ext]
)
p.process() p.process()
self.assertEqual(p.outputs[0], root_merged_hash) self.assertEqual(p.outputs[0], root_merged_hash)
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main() unittest.main()

View File

@ -1,20 +1,21 @@
# standard imports # standard imports
import logging
import os import os
import unittest import unittest
import logging
# local imports # local imports
from cic.contract.components.proof import Proof from cic import Proof
from cic.contract.components.attachment import Attachment from cic.attachment import Attachment
# test imports # test imports
from tests.base_cic import test_data_dir, TestCICBase, root_merged_hash from tests.base_cic import TestCICBase, root_merged_hash, test_data_dir
logging.basicConfig(level=logging.DEBUG) logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger() logg = logging.getLogger()
class TestProof(TestCICBase): class TestProof(TestCICBase):
def test_proof_load(self): def test_proof(self):
proof_path = os.path.join(test_data_dir, "proof") proof_path = os.path.join(test_data_dir, "proof")
attach = Attachment(proof_path, writer=self.outputs_writer) attach = Attachment(proof_path, writer=self.outputs_writer)
attach.load() attach.load()

View File

@ -1,2 +0,0 @@
issuer,namespace,voucher_name,symbol,location,country_code,supply,precision,token_type,demurrage,period_minutes,phone_number,email_address,sink_account,description
Bondeni SHG,ge,Bondeni,BONDE,Mutitu Kilifi,KE,5025,6,demurrage,46124891913883000000000000000000,1440,254797782065,info@grassecon.org,0xB8830b647C01433F9492F315ddBFDc35CB3Be6A6,1 BONDE = 1 itumbe

View File

@ -1,2 +0,0 @@
issuer,namespace,voucher_name,symbol,location,country_code,supply,precision,token_type,demurrage,period_minutes,phone_number,email_address,sink_account,description
Bondeni SHG,ge,Bondeni,BONDE,Mutitu Kilifi,KE,5025,6,giftable,,,254797782065,info@grassecon.org,,1 BONDE = 1 itumbe