Merge "origin/master" into "philip/import-pins-script"

PhilipWafula 2021-04-29 12:38:05 +03:00
commit daa022c3d8
Signed by untrusted user: mango-habanero
GPG Key ID: B00CE9034DA19FB7
101 changed files with 1692 additions and 1766 deletions

.gitignore
View File

@@ -1,6 +1,10 @@
 service-configs/*
 !service-configs/.gitkeep
-node_modules
+**/node_modules/
 __pycache__
 *.pyc
 *.o
+gmon.out
+*.egg-info
+dist/
+build/

View File

@@ -67,6 +67,7 @@ class ERC20TransferFilter(SyncFilter):
                 tx.status == Status.SUCCESS,
                 block.timestamp,
                 )
-        db_session.flush()
+        #db_session.flush()
+        db_session.commit()

         return True

View File

@@ -77,7 +77,7 @@ def main():
     if len(syncer_backends) == 0:
         logg.info('found no backends to resume')
-        syncers.append(SQLBackend.initial(chain_spec, block_offset))
+        syncer_backends.append(SQLBackend.initial(chain_spec, block_offset))
     else:
         for syncer_backend in syncer_backends:
             logg.info('resuming sync session {}'.format(syncer_backend))

View File

@@ -2,4 +2,9 @@
 . ./db.sh
+if [ $? -ne "0" ]; then
+	>&2 echo db migrate fail
+	exit 1
+fi
+
 /usr/local/bin/cic-cache-trackerd $@

View File

@@ -2,7 +2,7 @@
 import datetime
 import logging

-# third-party imports
+# external imports
 import celery
 from chainlib.eth.constant import ZERO_ADDRESS
 from chainlib.chain import ChainSpec
@@ -32,7 +32,9 @@ def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.AL
     :returns: New lock state for address
     :rtype: number
     """
-    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
+    chain_str = '::'
+    if chain_spec_dict != None:
+        chain_str = str(ChainSpec.from_dict(chain_spec_dict))
     r = Lock.set(chain_str, flags, address=address, tx_hash=tx_hash)
     logg.debug('Locked {} for {}, flag now {}'.format(flags, address, r))
     return chained_input
@@ -51,7 +53,9 @@ def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.
     :returns: New lock state for address
     :rtype: number
     """
-    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
+    chain_str = '::'
+    if chain_spec_dict != None:
+        chain_str = str(ChainSpec.from_dict(chain_spec_dict))
     r = Lock.reset(chain_str, flags, address=address)
     logg.debug('Unlocked {} for {}, flag now {}'.format(flags, address, r))
     return chained_input
@@ -127,7 +131,9 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
 @celery_app.task(base=CriticalSQLAlchemyTask)
 def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
-    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
+    chain_str = '::'
+    if chain_spec_dict != None:
+        chain_str = str(ChainSpec.from_dict(chain_spec_dict))
     session = SessionBase.create_session()
     r = Lock.check(chain_str, lock_flags, address=ZERO_ADDRESS, session=session)
     if address != None:
@@ -139,3 +145,9 @@ def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
         session.flush()
     session.close()
     return chained_input
+
+
+@celery_app.task()
+def shutdown(message):
+    logg.critical('shutdown called: {}'.format(message))
+    celery_app.control.shutdown() #broadcast('shutdown')

View File

@@ -0,0 +1,19 @@
+# standard imports
+import logging
+
+# external imports
+import celery
+
+# local imports
+from cic_eth.task import BaseTask
+
+celery_app = celery.current_app
+logg = logging.getLogger()
+
+
+@celery_app.task(bind=True, base=BaseTask)
+def default_token(self):
+    return {
+        'symbol': self.default_token_symbol,
+        'address': self.default_token_address,
+        }

View File

@@ -60,6 +60,29 @@ class AdminApi:
         self.call_address = call_address


+    def proxy_do(self, chain_spec, o):
+        s_proxy = celery.signature(
+                'cic_eth.task.rpc_proxy',
+                [
+                    chain_spec.asdict(),
+                    o,
+                    'default',
+                    ],
+                queue=self.queue
+                )
+        return s_proxy.apply_async()
+
+
+    def registry(self):
+        s_registry = celery.signature(
+                'cic_eth.task.registry',
+                [],
+                queue=self.queue
+                )
+        return s_registry.apply_async()
+
+
     def unlock(self, chain_spec, address, flags=None):
         s_unlock = celery.signature(
                 'cic_eth.admin.ctrl.unlock',
@@ -146,7 +169,6 @@ class AdminApi:
         # TODO: This check should most likely be in resend task itself
         tx_dict = s_get_tx_cache.apply_async().get()
-        #if tx_dict['status'] in [StatusEnum.REVERTED, StatusEnum.SUCCESS, StatusEnum.CANCELLED, StatusEnum.OBSOLETED]:
         if not is_alive(getattr(StatusEnum, tx_dict['status']).value):
             raise TxStateChangeError('Cannot resend mined or obsoleted transaction'.format(txold_hash_hex))
@@ -226,9 +248,6 @@ class AdminApi:
                 break
             last_nonce = nonce_otx

-        #nonce_cache = Nonce.get(address)
-        #nonce_w3 = self.w3.eth.getTransactionCount(address, 'pending')
-
         return {
             'nonce': {
                 #'network': nonce_cache,
@@ -272,20 +291,6 @@ class AdminApi:
         return s_nonce.apply_async()

-    # # TODO: this is a stub, complete all checks
-    # def ready(self):
-    #     """Checks whether all required initializations have been performed.
-    #
-    #     :raises cic_eth.error.InitializationError: At least one setting pre-requisite has not been met.
-    #     :raises KeyError: An address provided for initialization is not known by the keystore.
-    #     """
-    #     addr = AccountRole.get_address('ETH_GAS_PROVIDER_ADDRESS')
-    #     if addr == ZERO_ADDRESS:
-    #         raise InitializationError('missing account ETH_GAS_PROVIDER_ADDRESS')
-    #
-    #     self.w3.eth.sign(addr, text='666f6f')
-

     def account(self, chain_spec, address, include_sender=True, include_recipient=True, renderer=None, w=sys.stdout):
         """Lists locally originated transactions for the given Ethereum address.
@@ -348,6 +353,7 @@
     # TODO: Add exception upon non-existent tx aswell as invalid tx data to docstring
+    # TODO: This method is WAY too long
     def tx(self, chain_spec, tx_hash=None, tx_raw=None, registry=None, renderer=None, w=sys.stdout):
         """Output local and network details about a given transaction with local origin.
@@ -370,7 +376,6 @@
         if tx_raw != None:
             tx_hash = add_0x(keccak256_hex_to_hex(tx_raw))
-            #tx_hash = self.w3.keccak(hexstr=tx_raw).hex()

         s = celery.signature(
                 'cic_eth.queue.query.get_tx_cache',
@@ -386,38 +391,78 @@
         source_token = None
         if tx['source_token'] != ZERO_ADDRESS:
-            try:
-                source_token = registry.by_address(tx['source_token'])
-                #source_token = CICRegistry.get_address(chain_spec, tx['source_token']).contract
-            except UnknownContractError:
-                #source_token_contract = self.w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=tx['source_token'])
-                #source_token = CICRegistry.add_token(chain_spec, source_token_contract)
-                logg.warning('unknown source token contract {}'.format(tx['source_token']))
+            if registry != None:
+                try:
+                    source_token = registry.by_address(tx['source_token'])
+                except UnknownContractError:
+                    logg.warning('unknown source token contract {} (direct)'.format(tx['source_token']))
+            else:
+                s = celery.signature(
+                        'cic_eth.task.registry_address_lookup',
+                        [
+                            chain_spec.asdict(),
+                            tx['source_token'],
+                            ],
+                        queue=self.queue
+                        )
+                t = s.apply_async()
+                source_token = t.get()
+                if source_token == None:
+                    logg.warning('unknown source token contract {} (task pool)'.format(tx['source_token']))

         destination_token = None
-        if tx['source_token'] != ZERO_ADDRESS:
-            try:
-                #destination_token = CICRegistry.get_address(chain_spec, tx['destination_token'])
-                destination_token = registry.by_address(tx['destination_token'])
-            except UnknownContractError:
-                #destination_token_contract = self.w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=tx['source_token'])
-                #destination_token = CICRegistry.add_token(chain_spec, destination_token_contract)
-                logg.warning('unknown destination token contract {}'.format(tx['destination_token']))
+        if tx['destination_token'] != ZERO_ADDRESS:
+            if registry != None:
+                try:
+                    destination_token = registry.by_address(tx['destination_token'])
+                except UnknownContractError:
+                    logg.warning('unknown destination token contract {}'.format(tx['destination_token']))
+            else:
+                s = celery.signature(
+                        'cic_eth.task.registry_address_lookup',
+                        [
+                            chain_spec.asdict(),
+                            tx['destination_token'],
+                            ],
+                        queue=self.queue
+                        )
+                t = s.apply_async()
+                destination_token = t.get()
+                if destination_token == None:
+                    logg.warning('unknown destination token contract {} (task pool)'.format(tx['destination_token']))

         tx['sender_description'] = 'Custodial account'
         tx['recipient_description'] = 'Custodial account'

         o = code(tx['sender'])
-        r = self.rpc.do(o)
+        t = self.proxy_do(chain_spec, o)
+        r = t.get()
         if len(strip_0x(r, allow_empty=True)) > 0:
-            try:
-                #sender_contract = CICRegistry.get_address(chain_spec, tx['sender'])
-                sender_contract = registry.by_address(tx['sender'], sender_address=self.call_address)
-                tx['sender_description'] = 'Contract at {}'.format(tx['sender']) #sender_contract)
-            except UnknownContractError:
-                tx['sender_description'] = 'Unknown contract'
-            except KeyError as e:
-                tx['sender_description'] = 'Unknown contract'
+            if registry != None:
+                try:
+                    sender_contract = registry.by_address(tx['sender'], sender_address=self.call_address)
+                    tx['sender_description'] = 'Contract at {}'.format(tx['sender'])
+                except UnknownContractError:
+                    tx['sender_description'] = 'Unknown contract'
+                except KeyError as e:
+                    tx['sender_description'] = 'Unknown contract'
+            else:
+                s = celery.signature(
+                        'cic_eth.task.registry_address_lookup',
+                        [
+                            chain_spec.asdict(),
+                            tx['sender'],
+                            ],
+                        queue=self.queue
+                        )
+                t = s.apply_async()
+                tx['sender_description'] = t.get()
+                if tx['sender_description'] == None:
+                    tx['sender_description'] = 'Unknown contract'
         else:
             s = celery.signature(
                     'cic_eth.eth.account.have',
@@ -446,16 +491,31 @@ class AdminApi:
             tx['sender_description'] = role

         o = code(tx['recipient'])
-        r = self.rpc.do(o)
+        t = self.proxy_do(chain_spec, o)
+        r = t.get()
         if len(strip_0x(r, allow_empty=True)) > 0:
-            try:
-                #recipient_contract = CICRegistry.by_address(tx['recipient'])
-                recipient_contract = registry.by_address(tx['recipient'])
-                tx['recipient_description'] = 'Contract at {}'.format(tx['recipient']) #recipient_contract)
-            except UnknownContractError as e:
-                tx['recipient_description'] = 'Unknown contract'
-            except KeyError as e:
-                tx['recipient_description'] = 'Unknown contract'
+            if registry != None:
+                try:
+                    recipient_contract = registry.by_address(tx['recipient'])
+                    tx['recipient_description'] = 'Contract at {}'.format(tx['recipient'])
+                except UnknownContractError as e:
+                    tx['recipient_description'] = 'Unknown contract'
+                except KeyError as e:
+                    tx['recipient_description'] = 'Unknown contract'
+            else:
+                s = celery.signature(
+                        'cic_eth.task.registry_address_lookup',
+                        [
+                            chain_spec.asdict(),
+                            tx['recipient'],
+                            ],
+                        queue=self.queue
+                        )
+                t = s.apply_async()
+                tx['recipient_description'] = t.get()
+                if tx['recipient_description'] == None:
+                    tx['recipient_description'] = 'Unknown contract'
         else:
             s = celery.signature(
                     'cic_eth.eth.account.have',
@@ -497,7 +557,8 @@
         r = None
         try:
             o = transaction(tx_hash)
-            r = self.rpc.do(o)
+            t = self.proxy_do(chain_spec, o)
+            r = t.get()
             if r != None:
                 tx['network_status'] = 'Mempool'
         except Exception as e:
@@ -506,7 +567,8 @@
         if r != None:
             try:
                 o = receipt(tx_hash)
-                r = self.rpc.do(o)
+                t = self.proxy_do(chain_spec, o)
+                r = t.get()
                 logg.debug('h {} o {}'.format(tx_hash, o))
                 if int(strip_0x(r['status'])) == 1:
                     tx['network_status'] = 'Confirmed'
@@ -521,11 +583,13 @@
             pass

         o = balance(tx['sender'])
-        r = self.rpc.do(o)
+        t = self.proxy_do(chain_spec, o)
+        r = t.get()
         tx['sender_gas_balance'] = r

         o = balance(tx['recipient'])
-        r = self.rpc.do(o)
+        t = self.proxy_do(chain_spec, o)
+        r = t.get()
         tx['recipient_gas_balance'] = r

         tx_unpacked = unpack(bytes.fromhex(strip_0x(tx['signed_tx'])), chain_spec)

View File

@@ -62,6 +62,18 @@ class Api:
                 )

+
+    def default_token(self):
+        s_token = celery.signature(
+                'cic_eth.admin.token.default_token',
+                [],
+                queue=self.queue,
+                )
+        if self.callback_param != None:
+            s_token.link(self.callback_success)
+
+        return s_token.apply_async()
+

     def convert_transfer(self, from_address, to_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
         """Executes a chain of celery tasks that performs conversion between two ERC20 tokens, and transfers to a specified receipient after convert has completed.

View File

View File

@@ -0,0 +1,8 @@
+from cic_eth.db.models.base import SessionBase
+
+
+def health(*args, **kwargs):
+    session = SessionBase.create_session()
+    session.execute('SELECT count(*) from alembic_version')
+    session.close()
+    return True

View File

@@ -0,0 +1,48 @@
+# standard imports
+import logging
+
+# external imports
+from chainlib.connection import RPCConnection
+from chainlib.chain import ChainSpec
+from chainlib.eth.gas import balance
+
+# local imports
+from cic_eth.db.models.role import AccountRole
+from cic_eth.db.models.base import SessionBase
+from cic_eth.db.enum import LockEnum
+from cic_eth.error import LockedError
+from cic_eth.admin.ctrl import check_lock
+
+logg = logging.getLogger().getChild(__name__)
+
+
+def health(*args, **kwargs):
+    session = SessionBase.create_session()
+    config = kwargs['config']
+    chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
+    logg.debug('check gas balance of gas gifter for chain {}'.format(chain_spec))
+
+    try:
+        check_lock(None, None, LockEnum.INIT)
+    except LockedError:
+        logg.warning('INIT lock is set, skipping GAS GIFTER balance check.')
+        return True
+
+    gas_provider = AccountRole.get_address('GAS_GIFTER', session=session)
+    session.close()
+
+    rpc = RPCConnection.connect(chain_spec, 'default')
+    o = balance(gas_provider)
+    r = rpc.do(o)
+    try:
+        r = int(r, 16)
+    except TypeError:
+        r = int(r)
+
+    gas_min = int(config.get('ETH_GAS_GIFTER_MINIMUM_BALANCE'))
+    if r < gas_min:
+        logg.error('EEK! gas gifter has balance {}, below minimum {}'.format(r, gas_min))
+        return False
+
+    return True

View File

@@ -0,0 +1,18 @@
+# external imports
+import redis
+import os
+
+
+def health(*args, **kwargs):
+    r = redis.Redis(
+            host=kwargs['config'].get('REDIS_HOST'),
+            port=kwargs['config'].get('REDIS_PORT'),
+            db=kwargs['config'].get('REDIS_DB'),
+            )
+    try:
+        r.set(kwargs['unit'], os.getpid())
+    except redis.connection.ConnectionError:
+        return False
+    except redis.connection.ResponseError:
+        return False
+    return True

View File

@@ -0,0 +1,37 @@
+# standard imports
+import time
+import logging
+from urllib.error import URLError
+
+# external imports
+from chainlib.connection import RPCConnection
+from chainlib.eth.constant import ZERO_ADDRESS
+from chainlib.eth.sign import sign_message
+from chainlib.error import JSONRPCException
+
+logg = logging.getLogger().getChild(__name__)
+
+
+def health(*args, **kwargs):
+    blocked = True
+    max_attempts = 5
+    conn = RPCConnection.connect(kwargs['config'].get('CIC_CHAIN_SPEC'), tag='signer')
+    for i in range(max_attempts):
+        idx = i + 1
+        logg.debug('attempt signer connection check {}/{}'.format(idx, max_attempts))
+        try:
+            conn.do(sign_message(ZERO_ADDRESS, '0x2a'))
+        except FileNotFoundError:
+            pass
+        except ConnectionError:
+            pass
+        except URLError:
+            pass
+        except JSONRPCException:
+            logg.debug('signer connection succeeded')
+            return True
+        if idx < max_attempts:
+            time.sleep(0.5)
+
+    return False

View File

@@ -74,10 +74,11 @@ class LockEnum(enum.IntEnum):
     QUEUE: Disable queueing new or modified transactions
     """
     STICKY=1
-    CREATE=2
-    SEND=4
-    QUEUE=8
-    QUERY=16
+    INIT=2
+    CREATE=4
+    SEND=8
+    QUEUE=16
+    QUERY=32
     ALL=int(0xfffffffffffffffe)
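
Note on the renumbering above: the lock flags are bitmask values, so adding INIT at 2 shifts every later member one bit up. A minimal sketch of how the new values compose, using only the enum members from the diff above (the combined value mirrors the INIT | SEND | QUEUE seed row inserted by the migration later in this commit):

# sketch only; members copied from the LockEnum diff above
import enum

class LockEnum(enum.IntEnum):
    STICKY=1
    INIT=2
    CREATE=4
    SEND=8
    QUEUE=16
    QUERY=32
    ALL=int(0xfffffffffffffffe)

seed = LockEnum.INIT | LockEnum.SEND | LockEnum.QUEUE   # 2 + 8 + 16 = 26
assert seed & LockEnum.SEND             # sending is locked
assert not (seed & LockEnum.CREATE)     # account creation is not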

View File

@@ -5,8 +5,11 @@ Revises: 1f1b3b641d08
 Create Date: 2021-04-02 18:41:20.864265

 """
+import datetime
+
 from alembic import op
 import sqlalchemy as sa
+from chainlib.eth.constant import ZERO_ADDRESS
+from cic_eth.db.enum import LockEnum


 # revision identifiers, used by Alembic.
@@ -23,10 +26,11 @@ def upgrade():
         sa.Column("address", sa.String(42), nullable=True),
         sa.Column('blockchain', sa.String),
         sa.Column("flags", sa.BIGINT(), nullable=False, default=0),
-        sa.Column("date_created", sa.DateTime, nullable=False),
+        sa.Column("date_created", sa.DateTime, nullable=False, default=datetime.datetime.utcnow),
         sa.Column("otx_id", sa.Integer, sa.ForeignKey('otx.id'), nullable=True),
         )
     op.create_index('idx_chain_address', 'lock', ['blockchain', 'address'], unique=True)
+    op.execute("INSERT INTO lock (address, date_created, blockchain, flags) VALUES('{}', '{}', '::', {})".format(ZERO_ADDRESS, datetime.datetime.utcnow(), LockEnum.INIT | LockEnum.SEND | LockEnum.QUEUE))


 def downgrade():

View File

@@ -48,6 +48,8 @@ class RoleMissingError(Exception):
     pass

+
+
 class IntegrityError(Exception):
     """Exception raised to signal irregularities with deduplication and ordering of tasks
@@ -62,15 +64,19 @@ class LockedError(Exception):
     pass


-class SignerError(Exception):
+class SeppukuError(Exception):
+    """Exception base class for all errors that should cause system shutdown
+    """
+
+
+class SignerError(SeppukuError):
     """Exception raised when signer is unavailable or generates an error
     """
     pass


-class EthError(Exception):
-    """Exception raised when unspecified error from evm node is encountered
+class RoleAgencyError(SeppukuError):
+    """Exception raise when a role cannot perform its function. This is a critical exception
     """
-    pass

View File

@@ -4,10 +4,10 @@ import logging
 # external imports
 import celery
 from erc20_single_shot_faucet import SingleShotFaucet as Faucet
-from chainlib.eth.constant import ZERO_ADDRESS
 from hexathon import (
     strip_0x,
 )
+from chainlib.eth.constant import ZERO_ADDRESS
 from chainlib.connection import RPCConnection
 from chainlib.eth.sign import (
     new_account,
@@ -19,6 +19,7 @@ from chainlib.eth.tx import (
     unpack,
 )
 from chainlib.chain import ChainSpec
+from chainlib.error import JSONRPCException
 from eth_accounts_index import AccountRegistry
 from sarafu_faucet import MinterFaucet as Faucet
 from chainqueue.db.models.tx import TxCache
@@ -70,11 +71,18 @@ def create(self, password, chain_spec_dict):
     a = None
     conn = RPCConnection.connect(chain_spec, 'signer')
     o = new_account()
-    a = conn.do(o)
+    try:
+        a = conn.do(o)
+    except ConnectionError as e:
+        raise SignerError(e)
+    except FileNotFoundError as e:
+        raise SignerError(e)
     conn.disconnect()

+    # TODO: It seems infeasible that a can be None in any case, verify
     if a == None:
         raise SignerError('create account')
     logg.debug('created account {}'.format(a))

     # Initialize nonce provider record for account
@@ -219,21 +227,22 @@ def have(self, account, chain_spec_dict):
     """
     chain_spec = ChainSpec.from_dict(chain_spec_dict)
     o = sign_message(account, '0x2a')
-    try:
-        conn = RPCConnection.connect(chain_spec, 'signer')
-    except Exception as e:
-        logg.debug('cannot sign with {}: {}'.format(account, e))
-        return None
+    conn = RPCConnection.connect(chain_spec, 'signer')

     try:
         conn.do(o)
-        conn.disconnect()
-        return account
-    except Exception as e:
+    except ConnectionError as e:
+        raise SignerError(e)
+    except FileNotFoundError as e:
+        raise SignerError(e)
+    except JSONRPCException as e:
         logg.debug('cannot sign with {}: {}'.format(account, e))
         conn.disconnect()
         return None
+
+    conn.disconnect()
+    return account


 @celery_app.task(bind=True, base=CriticalSQLAlchemyTask)
 def set_role(self, tag, address, chain_spec_dict):

View File

@@ -108,7 +108,13 @@ def transfer(self, tokens, holder_address, receiver_address, value, chain_spec_d
     nonce_oracle = CustodialTaskNonceOracle(holder_address, self.request.root_id, session=session)
     gas_oracle = self.create_gas_oracle(rpc, MaxGasOracle.gas)
     c = ERC20(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle)
-    (tx_hash_hex, tx_signed_raw_hex) = c.transfer(t['address'], holder_address, receiver_address, value, tx_format=TxFormat.RLP_SIGNED)
+    try:
+        (tx_hash_hex, tx_signed_raw_hex) = c.transfer(t['address'], holder_address, receiver_address, value, tx_format=TxFormat.RLP_SIGNED)
+    except FileNotFoundError as e:
+        raise SignerError(e)
+    except ConnectionError as e:
+        raise SignerError(e)
     rpc_signer.disconnect()
     rpc.disconnect()
@@ -171,7 +177,12 @@ def approve(self, tokens, holder_address, spender_address, value, chain_spec_dic
     nonce_oracle = CustodialTaskNonceOracle(holder_address, self.request.root_id, session=session)
     gas_oracle = self.create_gas_oracle(rpc, MaxGasOracle.gas)
     c = ERC20(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle)
-    (tx_hash_hex, tx_signed_raw_hex) = c.approve(t['address'], holder_address, spender_address, value, tx_format=TxFormat.RLP_SIGNED)
+    try:
+        (tx_hash_hex, tx_signed_raw_hex) = c.approve(t['address'], holder_address, spender_address, value, tx_format=TxFormat.RLP_SIGNED)
+    except FileNotFoundError as e:
+        raise SignerError(e)
+    except ConnectionError as e:
+        raise SignerError(e)
     rpc_signer.disconnect()
     rpc.disconnect()

View File

@@ -328,7 +328,12 @@ def refill_gas(self, recipient_address, chain_spec_dict):
     # build and add transaction
     logg.debug('tx send gas amount {} from provider {} to {}'.format(refill_amount, gas_provider, recipient_address))
-    (tx_hash_hex, tx_signed_raw_hex) = c.create(gas_provider, recipient_address, refill_amount, tx_format=TxFormat.RLP_SIGNED)
+    try:
+        (tx_hash_hex, tx_signed_raw_hex) = c.create(gas_provider, recipient_address, refill_amount, tx_format=TxFormat.RLP_SIGNED)
+    except ConnectionError as e:
+        raise SignerError(e)
+    except FileNotFoundError as e:
+        raise SignerError(e)
     logg.debug('adding queue refill gas tx {}'.format(tx_hash_hex))
     cache_task = 'cic_eth.eth.gas.cache_gas_data'
     register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=cache_task, session=session)
@@ -404,7 +409,12 @@ def resend_with_higher_gas(self, txold_hash_hex, chain_spec_dict, gas=None, defa
     c = TxFactory(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle)
     logg.debug('change gas price from old {} to new {} for tx {}'.format(tx['gasPrice'], new_gas_price, tx))
     tx['gasPrice'] = new_gas_price
-    (tx_hash_hex, tx_signed_raw_hex) = c.build_raw(tx)
+    try:
+        (tx_hash_hex, tx_signed_raw_hex) = c.build_raw(tx)
+    except ConnectionError as e:
+        raise SignerError(e)
+    except FileNotFoundError as e:
+        raise SignerError(e)
     queue_create(
         chain_spec,
         tx['nonce'],

View File

@@ -114,7 +114,7 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
             # TODO: pass through registry to validate declarator entry of token
             #token = registry.by_address(tx['to'], sender_address=self.call_address)
-            token = ERC20Token(rpc, tx['to'])
+            token = ERC20Token(chain_spec, rpc, tx['to'])
             token_symbol = token.symbol
             token_decimals = token.decimals
             times = tx_times(tx['hash'], chain_spec)

View File

@@ -12,6 +12,7 @@ from chainqueue.error import NotLocalTxError
 # local imports
 from cic_eth.task import CriticalSQLAlchemyAndWeb3Task
+from cic_eth.db.models.base import SessionBase

 celery_app = celery.current_app

View File

@@ -29,5 +29,5 @@ def connect(rpc, chain_spec, registry_address):
     CICRegistry.address = registry_address
     registry = CICRegistry(chain_spec, rpc)
     registry_address = registry.by_name('ContractRegistry')
     return registry

View File

@@ -23,7 +23,6 @@ default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')
 argparser = argparse.ArgumentParser()
 argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
-argparser.add_argument('-r', '--registry-address', type=str, help='CIC registry address')
 argparser.add_argument('-f', '--format', dest='f', default=default_format, type=str, help='Output format')
 argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
 argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
@@ -59,6 +58,7 @@ args_override = {
     'CIC_CHAIN_SPEC': getattr(args, 'i'),
 }
 # override args
+config.dict_override(args_override, 'cli')
 config.censor('PASSWORD', 'DATABASE')
 config.censor('PASSWORD', 'SSL')
 logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
@@ -67,7 +67,9 @@ celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=confi
 queue = args.q

-chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
+chain_spec = None
+if config.get('CIC_CHAIN_SPEC') != None and config.get('CIC_CHAIN_SPEC') != '::':
+    chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))

 admin_api = AdminApi(None)
@@ -82,6 +84,9 @@ def lock_names_to_flag(s):
 # TODO: move each command to submodule
 def main():
+    chain_spec_dict = None
+    if chain_spec != None:
+        chain_spec_dict = chain_spec.asdict()
+
     if args.command == 'unlock':
         flags = lock_names_to_flag(args.flags)
         if not is_checksum_address(args.address):
@@ -91,7 +96,7 @@ def main():
             'cic_eth.admin.ctrl.unlock',
             [
                 None,
-                chain_spec.asdict(),
+                chain_spec_dict,
                 args.address,
                 flags,
             ],
@@ -110,7 +115,7 @@ def main():
             'cic_eth.admin.ctrl.lock',
             [
                 None,
-                chain_spec.asdict(),
+                chain_spec_dict,
                 args.address,
                 flags,
             ],

View File

@@ -11,10 +11,19 @@ import websocket
 # external imports
 import celery
 import confini
-from chainlib.connection import RPCConnection
-from chainlib.eth.connection import EthUnixSignerConnection
+from chainlib.connection import (
+        RPCConnection,
+        ConnType,
+        )
+from chainlib.eth.connection import (
+        EthUnixSignerConnection,
+        EthHTTPSignerConnection,
+        )
 from chainlib.chain import ChainSpec
 from chainqueue.db.models.otx import Otx
+from cic_eth_registry.error import UnknownContractError
+
+import liveness.linux

 # local imports
 from cic_eth.eth import (
@@ -51,6 +60,8 @@ from cic_eth.registry import (
     connect_declarator,
     connect_token_registry,
 )
+from cic_eth.task import BaseTask

 logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger()
@@ -62,6 +73,7 @@ argparser.add_argument('-p', '--provider', dest='p', type=str, help='rpc provide
 argparser.add_argument('-c', type=str, default=config_dir, help='config file')
 argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
 argparser.add_argument('-r', type=str, help='CIC registry address')
+argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use')
 argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
 argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to perist all queue entry status changes to storage')
 argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
@@ -81,6 +93,7 @@ config.process()
 args_override = {
     'CIC_CHAIN_SPEC': getattr(args, 'i'),
     'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
+    'CIC_DEFAULT_TOKEN_SYMBOL': getattr(args, 'default_token_symbol'),
     'ETH_PROVIDER': getattr(args, 'p'),
     'TASKS_TRACE_QUEUE_STATUS': getattr(args, 'trace_queue_status'),
 }
@@ -90,14 +103,15 @@ config.censor('PASSWORD', 'DATABASE')
 config.censor('PASSWORD', 'SSL')
 logg.debug('config loaded from {}:\n{}'.format(args.c, config))

+health_modules = config.get('CIC_HEALTH_MODULES', [])
+if len(health_modules) != 0:
+    health_modules = health_modules.split(',')
+logg.debug('health mods {}'.format(health_modules))
+
 # connect to database
 dsn = dsn_from_config(config)
 SessionBase.connect(dsn, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))

-# verify database connection with minimal sanity query
-session = SessionBase.create_session()
-session.execute('select version_num from alembic_version')
-session.close()

 # set up celery
 current_app = celery.Celery(__name__)
@@ -134,11 +148,18 @@ else:
         })

 chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))

+RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, 'signer')
+RPCConnection.register_constructor(ConnType.HTTP, EthHTTPSignerConnection, 'signer')
+RPCConnection.register_constructor(ConnType.HTTP_SSL, EthHTTPSignerConnection, 'signer')
 RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
-RPCConnection.register_location(config.get('SIGNER_SOCKET_PATH'), chain_spec, 'signer', constructor=EthUnixSignerConnection)
+RPCConnection.register_location(config.get('SIGNER_SOCKET_PATH'), chain_spec, 'signer')

 Otx.tracing = config.true('TASKS_TRACE_QUEUE_STATUS')

+#import cic_eth.checks.gas
+#if not cic_eth.checks.gas.health(config=config):
+#    raise RuntimeError()
+liveness.linux.load(health_modules, rundir=config.get('CIC_RUN_DIR'), config=config, unit='cic-eth-tasker')

 def main():
     argv = ['worker']
@@ -162,7 +183,11 @@ def main():
     rpc = RPCConnection.connect(chain_spec, 'default')

-    connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
+    try:
+        registry = connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
+    except UnknownContractError as e:
+        logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
+        sys.exit(1)

     trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
     if trusted_addresses_src == None:
@@ -171,10 +196,18 @@ def main():
     trusted_addresses = trusted_addresses_src.split(',')
     for address in trusted_addresses:
         logg.info('using trusted address {}'.format(address))
+
     connect_declarator(rpc, chain_spec, trusted_addresses)
     connect_token_registry(rpc, chain_spec)
+    BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
+    BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol)
+    BaseTask.run_dir = config.get('CIC_RUN_DIR')
+    logg.info('default token set to {} {}'.format(BaseTask.default_token_symbol, BaseTask.default_token_address))
+
+    liveness.linux.set(rundir=config.get('CIC_RUN_DIR'))
     current_app.worker_main(argv)
+    liveness.linux.reset(rundir=config.get('CIC_RUN_DIR'))


 @celery.signals.eventlet_pool_postshutdown.connect

View File

@@ -0,0 +1,65 @@
+#!python3
+
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# standard imports
+import logging
+import argparse
+import os
+
+# external imports
+import confini
+import celery
+
+# local imports
+from cic_eth.api import Api
+
+logging.basicConfig(level=logging.WARNING)
+logg = logging.getLogger()
+
+default_format = 'terminal'
+default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')
+
+argparser = argparse.ArgumentParser()
+argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
+argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
+argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
+argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
+argparser.add_argument('-v', action='store_true', help='Be verbose')
+argparser.add_argument('-vv', help='be more verbose', action='store_true')
+args = argparser.parse_args()
+
+if args.v == True:
+    logging.getLogger().setLevel(logging.INFO)
+elif args.vv == True:
+    logging.getLogger().setLevel(logging.DEBUG)
+
+config_dir = os.path.join(args.c)
+os.makedirs(config_dir, 0o777, True)
+config = confini.Config(config_dir, args.env_prefix)
+config.process()
+args_override = {
+    'CIC_CHAIN_SPEC': getattr(args, 'i'),
+    }
+config.dict_override(args_override, 'cli args')
+config.censor('PASSWORD', 'DATABASE')
+config.censor('PASSWORD', 'SSL')
+logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
+
+celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
+
+queue = args.q
+
+api = Api(config.get('CIC_CHAIN_SPEC'), queue=queue)
+
+
+def main():
+    t = api.default_token()
+    token_info = t.get()
+    print('Default token symbol: {}'.format(token_info['symbol']))
+    print('Default token address: {}'.format(token_info['address']))
+
+
+if __name__ == '__main__':
+    main()

View File

@@ -85,9 +85,6 @@ def main():
             callback_queue=args.q,
             )

-    #register = not args.no_register
-    #logg.debug('register {}'.format(register))
-    #t = api.create_account(register=register)
     t = api.transfer(config.get('_SENDER'), config.get('_RECIPIENT'), config.get('_VALUE'), config.get('_SYMBOL'))

     ps.get_message()

View File

@@ -81,10 +81,14 @@ chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
 rpc = EthHTTPConnection(args.p)

-registry_address = config.get('CIC_REGISTRY_ADDRESS')
+#registry_address = config.get('CIC_REGISTRY_ADDRESS')

 admin_api = AdminApi(rpc)
+t = admin_api.registry()
+registry_address = t.get()
+logg.info('got registry address from task pool: {}'.format(registry_address))

 trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
 if trusted_addresses_src == None:
     logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
@@ -151,14 +155,16 @@ def main():
     txs = []
     renderer = render_tx
     if len(config.get('_QUERY')) > 66:
-        registry = connect_registry(rpc, chain_spec, registry_address)
-        admin_api.tx(chain_spec, tx_raw=config.get('_QUERY'), registry=registry, renderer=renderer)
+        #registry = connect_registry(rpc, chain_spec, registry_address)
+        #admin_api.tx(chain_spec, tx_raw=config.get('_QUERY'), registry=registry, renderer=renderer)
+        admin_api.tx(chain_spec, tx_raw=config.get('_QUERY'), renderer=renderer)

     elif len(config.get('_QUERY')) > 42:
-        registry = connect_registry(rpc, chain_spec, registry_address)
-        admin_api.tx(chain_spec, tx_hash=config.get('_QUERY'), registry=registry, renderer=renderer)
+        #registry = connect_registry(rpc, chain_spec, registry_address)
+        #admin_api.tx(chain_spec, tx_hash=config.get('_QUERY'), registry=registry, renderer=renderer)
+        admin_api.tx(chain_spec, tx_hash=config.get('_QUERY'), renderer=renderer)

     elif len(config.get('_QUERY')) == 42:
-        registry = connect_registry(rpc, chain_spec, registry_address)
+        #registry = connect_registry(rpc, chain_spec, registry_address)
         txs = admin_api.account(chain_spec, config.get('_QUERY'), include_recipient=False, renderer=render_account)
         renderer = render_account

     elif len(config.get('_QUERY')) >= 4 and config.get('_QUERY')[:4] == 'lock':

View File

@@ -7,18 +7,20 @@ import uuid
 # external imports
 import celery
 import sqlalchemy
+from chainlib.chain import ChainSpec
+from chainlib.connection import RPCConnection
 from chainlib.eth.constant import ZERO_ADDRESS
 from chainlib.eth.nonce import RPCNonceOracle
 from chainlib.eth.gas import RPCGasOracle
+from cic_eth_registry import CICRegistry
+from cic_eth_registry.error import UnknownContractError
+
+import liveness.linux

 # local imports
-from cic_eth.error import (
-    SignerError,
-    EthError,
-)
+from cic_eth.error import SeppukuError
 from cic_eth.db.models.base import SessionBase

-logg = logging.getLogger(__name__)
+logg = logging.getLogger().getChild(__name__)

 celery_app = celery.current_app
@@ -29,6 +31,9 @@ class BaseTask(celery.Task):
     call_address = ZERO_ADDRESS
     create_nonce_oracle = RPCNonceOracle
     create_gas_oracle = RPCGasOracle
+    default_token_address = None
+    default_token_symbol = None
+    run_dir = '/run'

     def create_session(self):
         return BaseTask.session_func()
@@ -39,6 +44,19 @@
         return

+
+    def on_failure(self, exc, task_id, args, kwargs, einfo):
+        if isinstance(exc, SeppukuError):
+            liveness.linux.reset(rundir=self.run_dir)
+            logg.critical(einfo)
+            msg = 'received critical exception {}, calling shutdown'.format(str(exc))
+            s = celery.signature(
+                    'cic_eth.admin.ctrl.shutdown',
+                    [msg],
+                    queue=self.request.delivery_info.get('routing_key'),
+                    )
+            s.apply_async()
+

 class CriticalTask(BaseTask):
     retry_jitter = True
     retry_backoff = True
@@ -67,7 +85,6 @@ class CriticalSQLAlchemyAndWeb3Task(CriticalTask):
         sqlalchemy.exc.TimeoutError,
         requests.exceptions.ConnectionError,
         sqlalchemy.exc.ResourceClosedError,
-        EthError,
         )
     safe_gas_threshold_amount = 2000000000 * 60000 * 3
     safe_gas_refill_amount = safe_gas_threshold_amount * 5
@@ -78,19 +95,45 @@ class CriticalSQLAlchemyAndSignerTask(CriticalTask):
         sqlalchemy.exc.DatabaseError,
         sqlalchemy.exc.TimeoutError,
         sqlalchemy.exc.ResourceClosedError,
-        SignerError,
         )


 class CriticalWeb3AndSignerTask(CriticalTask):
     autoretry_for = (
         requests.exceptions.ConnectionError,
-        SignerError,
         )
     safe_gas_threshold_amount = 2000000000 * 60000 * 3
     safe_gas_refill_amount = safe_gas_threshold_amount * 5


-@celery_app.task(bind=True, base=BaseTask)
-def hello(self):
-    time.sleep(0.1)
-    return id(SessionBase.create_session)
+@celery_app.task()
+def check_health(self):
+    pass
+
+
+# TODO: registry / rpc methods should perhaps be moved to better named module
+@celery_app.task()
+def registry():
+    return CICRegistry.address
+
+
+@celery_app.task()
+def registry_address_lookup(chain_spec_dict, address, connection_tag='default'):
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    conn = RPCConnection.connect(chain_spec, tag=connection_tag)
+    registry = CICRegistry(chain_spec, conn)
+    return registry.by_address(address)
+
+
+@celery_app.task(throws=(UnknownContractError,))
+def registry_name_lookup(chain_spec_dict, name, connection_tag='default'):
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    conn = RPCConnection.connect(chain_spec, tag=connection_tag)
+    registry = CICRegistry(chain_spec, conn)
+    return registry.by_name(name)
+
+
+@celery_app.task()
+def rpc_proxy(chain_spec_dict, o, connection_tag='default'):
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    conn = RPCConnection.connect(chain_spec, tag=connection_tag)
+    return conn.do(o)

View File

@@ -10,7 +10,7 @@ version = (
     0,
     11,
     0,
-    'beta.6',
+    'beta.11',
     )

 version_object = semver.VersionInfo(

View File

@@ -3,3 +3,6 @@ registry_address =
 chain_spec = evm:bloxberg:8996
 tx_retry_delay =
 trust_address =
+default_token_symbol = GFT
+health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas
+run_dir = /run

View File

@@ -3,3 +3,6 @@ registry_address =
 chain_spec = evm:bloxberg:8996
 trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
 tx_retry_delay = 20
+default_token_symbol = GFT
+health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas
+run_dir = /run
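
The health_modules entries added above name the new cic_eth.check modules earlier in this diff; each is expected to expose a module-level health(*args, **kwargs) callable returning a boolean, which is what liveness.linux.load() is handed at tasker startup in this commit. A minimal sketch of such a module, assuming only that interface (the module name here is hypothetical):

# hypothetical cic_eth/check/example.py, mirroring the interface of the
# cic_eth.check.db and cic_eth.check.redis modules added in this commit
import logging

logg = logging.getLogger().getChild(__name__)


def health(*args, **kwargs):
    # the tasker passes at least 'config' and 'unit' keyword arguments,
    # as the redis and gas checks above rely on
    config = kwargs.get('config')
    logg.debug('example health check for unit {}'.format(kwargs.get('unit')))
    return config != None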

View File

@@ -1,8 +1,3 @@
 [eth]
-#ws_provider = ws://localhost:8546
-#ttp_provider = http://localhost:8545
 provider = http://localhost:63545
-gas_provider_address =
-#chain_id =
-abi_dir = /home/lash/src/ext/cic/grassrootseconomics/cic-contracts/abis
-account_accounts_index_writer =
+gas_gifter_minimum_balance = 10000000000000000000000

View File

@@ -1,5 +1,5 @@
 [signer]
-socket_path = /tmp/crypto-dev-signer/jsonrpc.ipc
+socket_path = ipc:///tmp/crypto-dev-signer/jsonrpc.ipc
 secret = deedbeef
 database_name = signer_test
 dev_keys_path =

View File

@@ -1,8 +1,3 @@
 [eth]
-#ws_provider = ws://localhost:8546
-#ttp_provider = http://localhost:8545
 provider = http://localhost:8545
-gas_provider_address =
-#chain_id =
-abi_dir = /usr/local/share/cic/solidity/abi
-account_accounts_index_writer =
+gas_gifter_minimum_balance = 10000000000000000000000

View File

@@ -53,3 +53,5 @@ COPY cic-eth/crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
 RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
     mkdir -p /usr/local/share/cic/solidity && \
     cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
+
+COPY util/liveness/health.sh /usr/local/bin/health.sh

View File

@@ -1,6 +1,6 @@
-cic-base~=0.1.2a76
+cic-base==0.1.2b5
 celery==4.4.7
-crypto-dev-signer~=0.4.14b2
+crypto-dev-signer~=0.4.14b3
 confini~=0.3.6rc3
 cic-eth-registry~=0.5.4a16
 #cic-bancor~=0.0.6
@@ -15,10 +15,10 @@ semver==2.13.0
 websocket-client==0.57.0
 moolb~=0.1.1b2
 eth-address-index~=0.1.1a9
-chainlib~=0.0.2a13
+chainlib~=0.0.2a20
 hexathon~=0.0.1a7
 chainsyncer[sql]~=0.0.2a2
-chainqueue~=0.0.1a7
+chainqueue~=0.0.2a2
 pysha3==1.0.2
 coincurve==15.0.0
 sarafu-faucet==0.0.2a28

View File

@@ -38,6 +38,7 @@ packages =
     cic_eth.runnable.daemons.filters
     cic_eth.callbacks
    cic_eth.sync
+    cic_eth.check
 scripts =
     ./scripts/migrate.py
@@ -52,6 +53,7 @@ console_scripts =
     cic-eth-create = cic_eth.runnable.create:main
     cic-eth-inspect = cic_eth.runnable.view:main
     cic-eth-ctl = cic_eth.runnable.ctrl:main
+    cic-eth-info = cic_eth.runnable.info:main
     # TODO: Merge this with ctl when subcmds sorted to submodules
     cic-eth-tag = cic_eth.runnable.tag:main
     cic-eth-resend = cic_eth.runnable.resend:main

View File

@@ -3,8 +3,12 @@ import os
 import sys
 import logging

+# external imports
+from chainlib.eth.erc20 import ERC20
+
 # local imports
 from cic_eth.api import Api
+from cic_eth.task import BaseTask

 script_dir = os.path.dirname(os.path.realpath(__file__))
 root_dir = os.path.dirname(script_dir)
@@ -28,3 +32,26 @@ def api(
         ):
     chain_str = str(default_chain_spec)
     return Api(chain_str, queue=None, callback_param='foo')
+
+
+@pytest.fixture(scope='function')
+def foo_token_symbol(
+        default_chain_spec,
+        foo_token,
+        eth_rpc,
+        contract_roles,
+        ):
+    c = ERC20(default_chain_spec)
+    o = c.symbol(foo_token, sender_address=contract_roles['CONTRACT_DEPLOYER'])
+    r = eth_rpc.do(o)
+    return c.parse_symbol(r)
+
+
+@pytest.fixture(scope='function')
+def default_token(
+        foo_token,
+        foo_token_symbol,
+        ):
+    BaseTask.default_token_symbol = foo_token_symbol
+    BaseTask.default_token_address = foo_token

View File

@@ -210,9 +210,11 @@ def test_callback_filter(
         def __init__(self):
             self.results = {}
+            self.queue = 'test'

         def call_back(self, transfer_type, result):
             self.results[transfer_type] = result
+            return self

     mock = CallbackMock()
     fltr.call_back = mock.call_back

View File

@@ -65,6 +65,7 @@ def test_tx(
     tx_hash_hex_orig = tx_hash_hex

     gas_oracle = OverrideGasOracle(price=1100000000, limit=21000)
+    c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
     (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
     queue_create(
         default_chain_spec,

View File

@@ -34,6 +34,7 @@ def celery_includes():
     'cic_eth.admin.ctrl',
     'cic_eth.admin.nonce',
     'cic_eth.admin.debug',
+    'cic_eth.admin.token',
     'cic_eth.eth.account',
     'cic_eth.callbacks.noop',
     'cic_eth.callbacks.http',

View File

@@ -53,6 +53,9 @@ def init_database(
     alembic.command.downgrade(ac, 'base')
     alembic.command.upgrade(ac, 'head')

+    session.execute('DELETE FROM lock')
+    session.commit()
+
     yield session
     session.commit()
     session.close()

View File

@@ -273,7 +273,7 @@ def test_tx(
         eth_signer,
         agent_roles,
         contract_roles,
-        celery_worker,
+        celery_session_worker,
         ):
     nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc)

View File

@@ -35,7 +35,7 @@ def test_list_tx(
         foo_token,
         register_tokens,
         init_eth_tester,
-        celery_worker,
+        celery_session_worker,
         ):
     tx_hashes = []

View File

@@ -0,0 +1,21 @@
+# external imports
+import celery
+
+
+def test_default_token(
+        default_token,
+        celery_session_worker,
+        foo_token,
+        foo_token_symbol,
+        ):
+    s = celery.signature(
+            'cic_eth.admin.token.default_token',
+            [],
+            queue=None,
+            )
+    t = s.apply_async()
+    r = t.get()
+    assert r['address'] == foo_token
+    assert r['symbol'] == foo_token_symbol

View File

@@ -3,4 +3,3 @@ dist
 dist-web
 dist-server
 scratch
-tests

View File

@@ -1,6 +1,6 @@
 {
   "name": "cic-client-meta",
-  "version": "0.0.7-alpha.2",
+  "version": "0.0.7-alpha.7",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
@@ -852,6 +852,75 @@
         "printj": "~1.1.0"
       }
     },
+    "crdt-meta": {
+      "version": "0.0.8",
+      "resolved": "https://registry.npmjs.org/crdt-meta/-/crdt-meta-0.0.8.tgz",
+      "integrity": "sha512-CS0sS0L2QWthz7vmu6vzl3p4kcpJ+IKILBJ4tbgN4A3iNG8wnBeuDIv/z3KFFQjcfuP4QAh6E9LywKUTxtDc3g==",
+      "requires": {
+        "automerge": "^0.14.2",
+        "ini": "^1.3.8",
+        "openpgp": "^4.10.8",
+        "pg": "^8.5.1",
+        "sqlite3": "^5.0.2"
+      },
+      "dependencies": {
+        "automerge": {
+          "version": "0.14.2",
+          "resolved": "https://registry.npmjs.org/automerge/-/automerge-0.14.2.tgz",
+          "integrity": "sha512-shiwuJHCbNRI23WZyIECLV4Ovf3WiAFJ7P9BH4l5gON1In/UUbjcSJKRygtIirObw2UQumeYxp3F2XBdSvQHnA==",
+          "requires": {
+            "immutable": "^3.8.2",
+            "transit-immutable-js": "^0.7.0",
+            "transit-js": "^0.8.861",
+            "uuid": "^3.4.0"
+          }
+        },
+        "node-addon-api": {
+          "version": "3.1.0",
+          "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.1.0.tgz",
+          "integrity": "sha512-flmrDNB06LIl5lywUz7YlNGZH/5p0M7W28k8hzd9Lshtdh1wshD2Y+U4h9LD6KObOy1f+fEVdgprPrEymjM5uw=="
+        },
+        "pg": {
+          "version": "8.6.0",
+          "resolved": "https://registry.npmjs.org/pg/-/pg-8.6.0.tgz",
+          "integrity": "sha512-qNS9u61lqljTDFvmk/N66EeGq3n6Ujzj0FFyNMGQr6XuEv4tgNTXvJQTfJdcvGit5p5/DWPu+wj920hAJFI+QQ==",
+          "requires": {
+            "buffer-writer": "2.0.0",
+            "packet-reader": "1.0.0",
+            "pg-connection-string": "^2.5.0",
+            "pg-pool": "^3.3.0",
+            "pg-protocol": "^1.5.0",
+            "pg-types": "^2.1.0",
+            "pgpass": "1.x"
+          }
+        },
+        "pg-connection-string": {
+          "version": "2.5.0",
+          "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz",
+          "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ=="
+        },
+        "pg-pool": {
+          "version": "3.3.0",
+          "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.3.0.tgz",
+          "integrity": "sha512-0O5huCql8/D6PIRFAlmccjphLYWC+JIzvUhSzXSpGaf+tjTZc4nn+Lr7mLXBbFJfvwbP0ywDv73EiaBsxn7zdg=="
+        },
+        "pg-protocol": {
+          "version": "1.5.0",
+          "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz",
+          "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ=="
+        },
+        "sqlite3": {
+          "version": "5.0.2",
+          "resolved": "https://registry.npmjs.org/sqlite3/-/sqlite3-5.0.2.tgz",
+          "integrity": "sha512-1SdTNo+BVU211Xj1csWa8lV6KM0CtucDwRyA0VHl91wEH1Mgh7RxUpI4rVvG7OhHrzCSGaVyW5g8vKvlrk9DJA==",
+          "requires": {
+            "node-addon-api": "^3.0.0",
+            "node-gyp": "3.x",
+            "node-pre-gyp": "^0.11.0"
+          }
+        }
+      }
+    },
     "create-hash": {
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz",
@ -966,17 +1035,17 @@
"dev": true "dev": true
}, },
"elliptic": { "elliptic": {
"version": "6.5.3", "version": "6.5.4",
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.3.tgz", "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz",
"integrity": "sha512-IMqzv5wNQf+E6aHeIqATs0tOLeOTwj1QKbRcS3jBbYkl5oLAserA8yJTT7/VyHUYG91PRmPyeQDObKLPpeS4dw==", "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==",
"requires": { "requires": {
"bn.js": "^4.4.0", "bn.js": "^4.11.9",
"brorand": "^1.0.1", "brorand": "^1.1.0",
"hash.js": "^1.0.0", "hash.js": "^1.0.0",
"hmac-drbg": "^1.0.0", "hmac-drbg": "^1.0.1",
"inherits": "^2.0.1", "inherits": "^2.0.4",
"minimalistic-assert": "^1.0.0", "minimalistic-assert": "^1.0.1",
"minimalistic-crypto-utils": "^1.0.0" "minimalistic-crypto-utils": "^1.0.1"
} }
}, },
"emoji-regex": { "emoji-regex": {
@ -1489,9 +1558,9 @@
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
}, },
"ini": { "ini": {
"version": "1.3.5", "version": "1.3.8",
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==" "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
}, },
"interpret": { "interpret": {
"version": "2.2.0", "version": "2.2.0",
@ -1957,9 +2026,9 @@
} }
}, },
"y18n": { "y18n": {
"version": "4.0.0", "version": "4.0.3",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz",
"integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==",
"dev": true "dev": true
}, },
"yargs": { "yargs": {

View File

@ -1,6 +1,6 @@
{ {
"name": "cic-client-meta", "name": "cic-client-meta",
"version": "0.0.7-alpha.6", "version": "0.0.7-alpha.8",
"description": "Signed CRDT metadata graphs for the CIC network", "description": "Signed CRDT metadata graphs for the CIC network",
"main": "dist/index.js", "main": "dist/index.js",
"types": "dist/index.d.ts", "types": "dist/index.d.ts",
@ -15,8 +15,9 @@
"dependencies": { "dependencies": {
"@ethereumjs/tx": "^3.0.0-beta.1", "@ethereumjs/tx": "^3.0.0-beta.1",
"automerge": "^0.14.1", "automerge": "^0.14.1",
"crdt-meta": "0.0.8",
"ethereumjs-wallet": "^1.0.1", "ethereumjs-wallet": "^1.0.1",
"ini": "^1.3.5", "ini": "^1.3.8",
"openpgp": "^4.10.8", "openpgp": "^4.10.8",
"pg": "^8.4.2", "pg": "^8.4.2",
"sqlite3": "^5.0.0", "sqlite3": "^5.0.0",
@ -40,6 +41,6 @@
], ],
"license": "GPL-3.0-or-later", "license": "GPL-3.0-or-later",
"engines": { "engines": {
"node": "~14.16.1" "node": ">=14.16.1"
} }
} }

View File

@ -1,4 +1,4 @@
const config = require('./src/config'); import { Config } from 'crdt-meta';
const fs = require('fs'); const fs = require('fs');
if (process.argv[2] === undefined) { if (process.argv[2] === undefined) {
@ -15,6 +15,6 @@ try {
process.exit(1); process.exit(1);
} }
const c = new config.Config(process.argv[2], process.env['CONFINI_ENV_PREFIX']); const c = new Config(process.argv[2], process.env['CONFINI_ENV_PREFIX']);
c.process(); c.process();
process.stdout.write(c.toString()); process.stdout.write(c.toString());

View File

@ -1,8 +1,7 @@
import * as Automerge from 'automerge'; import * as Automerge from 'automerge';
import * as pgp from 'openpgp'; import * as pgp from 'openpgp';
import * as pg from 'pg';
import { Envelope, Syncable } from '../../src/sync'; import { Envelope, Syncable } from 'crdt-meta';
function handleNoMergeGet(db, digest, keystore) { function handleNoMergeGet(db, digest, keystore) {

View File

@ -1,15 +1,11 @@
import * as http from 'http'; import * as http from 'http';
import * as fs from 'fs'; import * as fs from 'fs';
import * as path from 'path'; import * as path from 'path';
import * as pgp from 'openpgp';
import * as handlers from './handlers'; import * as handlers from './handlers';
import { Envelope, Syncable } from '../../src/sync'; import { PGPKeyStore, PGPSigner, Config, SqliteAdapter, PostgresAdapter } from 'crdt-meta';
import { PGPKeyStore, PGPSigner } from '../../src/auth';
import { standardArgs } from './args'; import { standardArgs } from './args';
import { Config } from '../../src/config';
import { SqliteAdapter, PostgresAdapter } from '../../src/db';
let configPath = '/usr/local/etc/cic-meta'; let configPath = '/usr/local/etc/cic-meta';

View File

@ -1,191 +0,0 @@
import * as pgp from 'openpgp';
import * as crypto from 'crypto';
interface Signable {
digest():string;
}
type KeyGetter = () => any;
type Signature = {
engine:string
algo:string
data:string
digest:string
}
interface Signer {
prepare(Signable):boolean;
onsign(Signature):void;
onverify(boolean):void;
sign(digest:string):void
verify(digest:string, signature:Signature):void
fingerprint():string
}
interface Authoritative {
}
interface KeyStore {
getPrivateKey: KeyGetter
getFingerprint: () => string
getTrustedKeys: () => Array<any>
getTrustedActiveKeys: () => Array<any>
getEncryptKeys: () => Array<any>
}
class PGPKeyStore implements KeyStore {
fingerprint: string
pk: any
pubk = {
active: [],
trusted: [],
encrypt: [],
}
loads = 0x00;
loadsTarget = 0x0f;
onload: (k:KeyStore) => void;
constructor(passphrase:string, pkArmor:string, pubkActiveArmor:string, pubkTrustedArmor:string, pubkEncryptArmor:string, onload = (ks:KeyStore) => {}) {
this._readKey(pkArmor, undefined, 1, passphrase);
this._readKey(pubkActiveArmor, 'active', 2);
this._readKey(pubkTrustedArmor, 'trusted', 4);
this._readKey(pubkEncryptArmor, 'encrypt', 8);
this.onload = onload;
}
private _readKey(a:string, x:any, n:number, pass?:string) {
pgp.key.readArmored(a).then((k) => {
if (pass !== undefined) {
this.pk = k.keys[0];
this.pk.decrypt(pass).then(() => {
this.fingerprint = this.pk.getFingerprint();
console.log('private key (sign)', this.fingerprint);
this._registerLoad(n);
});
} else {
this.pubk[x] = k.keys;
k.keys.forEach((pubk) => {
console.log('public key (' + x + ')', pubk.getFingerprint());
});
this._registerLoad(n);
}
});
}
private _registerLoad(b:number) {
this.loads |= b;
if (this.loads == this.loadsTarget) {
this.onload(this);
}
}
public getTrustedKeys(): Array<any> {
return this.pubk['trusted'];
}
public getTrustedActiveKeys(): Array<any> {
return this.pubk['active'];
}
public getEncryptKeys(): Array<any> {
return this.pubk['encrypt'];
}
public getPrivateKey(): any {
return this.pk;
}
public getFingerprint(): string {
return this.fingerprint;
}
}
class PGPSigner implements Signer {
engine = 'pgp'
algo = 'sha256'
dgst: string
signature: Signature
keyStore: KeyStore
onsign: (Signature) => void
onverify: (boolean) => void
constructor(keyStore:KeyStore) {
this.keyStore = keyStore
this.onsign = (string) => {};
this.onverify = (boolean) => {};
}
public fingerprint(): string {
return this.keyStore.getFingerprint();
}
public prepare(material:Signable):boolean {
this.dgst = material.digest();
return true;
}
public verify(digest:string, signature:Signature) {
pgp.signature.readArmored(signature.data).then((s) => {
const opts = {
message: pgp.cleartext.fromText(digest),
publicKeys: this.keyStore.getTrustedKeys(),
signature: s,
};
pgp.verify(opts).then((v) => {
let i = 0;
for (i = 0; i < v.signatures.length; i++) {
const s = v.signatures[i];
if (s.valid) {
this.onverify(s);
return;
}
}
console.error('checked ' + i + ' signature(s) but none valid');
this.onverify(false);
});
}).catch((e) => {
console.error(e);
this.onverify(false);
});
}
public sign(digest:string) {
const m = pgp.cleartext.fromText(digest);
const pk = this.keyStore.getPrivateKey();
const opts = {
message: m,
privateKeys: [pk],
detached: true,
}
pgp.sign(opts).then((s) => {
this.signature = {
engine: this.engine,
algo: this.algo,
data: s.signature,
// TODO: fix for browser later
digest: digest,
};
this.onsign(this.signature);
}).catch((e) => {
console.error(e);
this.onsign(undefined);
});
}
}
export {
Signature,
Authoritative,
Signer,
KeyGetter,
Signable,
KeyStore,
PGPSigner,
PGPKeyStore,
};

View File

@ -1,71 +0,0 @@
import * as fs from 'fs';
import * as ini from 'ini';
import * as path from 'path';
class Config {
filepath: string
store: Object
censor: Array<string>
require: Array<string>
env_prefix: string
constructor(filepath:string, env_prefix?:string) {
this.filepath = filepath;
this.store = {};
this.censor = [];
this.require = [];
this.env_prefix = '';
if (env_prefix !== undefined) {
this.env_prefix = env_prefix + "_";
}
}
public process() {
const d = fs.readdirSync(this.filepath);
const r = /.*\.ini$/;
for (let i = 0; i < d.length; i++) {
const f = d[i];
if (!f.match(r)) {
return;
}
const fp = path.join(this.filepath, f);
const v = fs.readFileSync(fp, 'utf-8');
const inid = ini.decode(v);
const inik = Object.keys(inid);
for (let j = 0; j < inik.length; j++) {
const k_section = inik[j]
const k = k_section.toUpperCase();
Object.keys(inid[k_section]).forEach((k_directive) => {
const kk = k_directive.toUpperCase();
const kkk = k + '_' + kk;
let r = inid[k_section][k_directive];
const k_env = this.env_prefix + kkk
const env = process.env[k_env];
if (env !== undefined) {
console.debug('Environment variable ' + k_env + ' overrides ' + kkk);
r = env;
}
this.store[kkk] = r;
});
}
}
}
public get(s:string) {
return this.store[s];
}
public toString() {
let s = '';
Object.keys(this.store).forEach((k) => {
s += k + '=' + this.store[k] + '\n';
});
return s;
}
}
export { Config };

View File

@ -1,38 +0,0 @@
import { JSONSerializable } from './format';
const ENGINE_NAME = 'automerge';
const ENGINE_VERSION = '0.14.1';
const NETWORK_NAME = 'cic';
const NETWORK_VERSION = '1';
const CRYPTO_NAME = 'pgp';
const CRYPTO_VERSION = '2';
type VersionedSpec = {
name: string
version: string
ext?: Object
}
const engineSpec:VersionedSpec = {
name: ENGINE_NAME,
version: ENGINE_VERSION,
}
const cryptoSpec:VersionedSpec = {
name: CRYPTO_NAME,
version: CRYPTO_VERSION,
}
const networkSpec:VersionedSpec = {
name: NETWORK_NAME,
version: NETWORK_VERSION,
}
export {
engineSpec,
cryptoSpec,
networkSpec,
VersionedSpec,
};

View File

@ -1,27 +0,0 @@
import * as crypto from 'crypto';
const _algs = {
'SHA-256': 'sha256',
}
function cryptoWrapper() {
}
cryptoWrapper.prototype.digest = async function(s, d) {
const h = crypto.createHash(_algs[s]);
h.update(d);
return h.digest();
}
let subtle = undefined;
if (typeof window !== 'undefined') {
subtle = window.crypto.subtle;
} else {
subtle = new cryptoWrapper();
}
export {
subtle,
}

View File

@ -1,90 +0,0 @@
import * as pg from 'pg';
import * as sqlite from 'sqlite3';
type DbConfig = {
name: string
host: string
port: number
user: string
password: string
}
interface DbAdapter {
query: (s:string, callback:(e:any, rs:any) => void) => void
close: () => void
}
const re_creatematch = /^(CREATE)/i
const re_getmatch = /^(SELECT)/i;
const re_setmatch = /^(INSERT|UPDATE)/i;
class SqliteAdapter implements DbAdapter {
db: any
constructor(dbConfig:DbConfig, callback?:(any) => void) {
this.db = new sqlite.Database(dbConfig.name); //, callback);
}
public query(s:string, callback:(e:any, rs?:any) => void): void {
const local_callback = (e, rs) => {
let r = undefined;
if (rs !== undefined) {
r = {
rowCount: rs.length,
rows: rs,
}
}
callback(e, r);
};
if (s.match(re_getmatch)) {
this.db.all(s, local_callback);
} else if (s.match(re_setmatch)) {
this.db.run(s, local_callback);
} else if (s.match(re_creatematch)) {
this.db.run(s, callback);
} else {
throw 'unhandled query';
}
}
public close() {
this.db.close();
}
}
class PostgresAdapter implements DbAdapter {
db: any
constructor(dbConfig:DbConfig) {
let o = dbConfig;
o['database'] = o.name;
this.db = new pg.Pool(o);
return this.db;
}
public query(s:string, callback:(e:any, rs:any) => void): void {
this.db.query(s, (e, rs) => {
let r = {
length: rs.rowCount,
}
rs.length = rs.rowCount;
if (e === undefined) {
e = null;
}
console.debug(e, rs);
callback(e, rs);
});
}
public close() {
this.db.end();
}
}
export {
DbConfig,
SqliteAdapter,
PostgresAdapter,
}

View File

@ -1,68 +0,0 @@
import * as crypto from './crypto';
interface Addressable {
key(): string
digest(): string
}
function stringToBytes(s:string) {
const a = new Uint8Array(20);
let j = 2;
for (let i = 0; i < a.byteLength; i++) {
const n = parseInt(s.substring(j, j+2), 16);
a[i] = n;
j += 2;
}
return a;
}
function bytesToHex(a:Uint8Array) {
let s = '';
for (let i = 0; i < a.byteLength; i++) {
const h = '00' + a[i].toString(16);
s += h.slice(-2);
}
return s;
}
async function mergeKey(a:Uint8Array, s:Uint8Array) {
const y = new Uint8Array(a.byteLength + s.byteLength);
for (let i = 0; i < a.byteLength; i++) {
y[i] = a[i];
}
for (let i = 0; i < s.byteLength; i++) {
y[a.byteLength + i] = s[i];
}
const z = await crypto.subtle.digest('SHA-256', y);
return bytesToHex(new Uint8Array(z));
}
async function toKey(v:string, salt:string) {
const a = stringToBytes(v);
const s = new TextEncoder().encode(salt);
return await mergeKey(a, s);
}
async function toAddressKey(zeroExHex:string, salt:string) {
const a = addressToBytes(zeroExHex);
const s = new TextEncoder().encode(salt);
return await mergeKey(a, s);
}
const re_addrHex = /^0[xX][a-fA-F0-9]{40}$/;
function addressToBytes(s:string) {
if (!s.match(re_addrHex)) {
throw 'invalid address hex';
}
return stringToBytes(s);
}
export {
toKey,
toAddressKey,
mergeKey,
bytesToHex,
addressToBytes,
Addressable,
}

View File

@ -1,58 +0,0 @@
import { v4 as uuidv4 } from 'uuid';
import { Syncable } from './sync';
import { Store } from './store';
import { PubSub } from './transport';
function toIndexKey(id:string):string {
const d = Date.now();
return d + '_' + id + '_' + uuidv4();
}
const _re_indexKey = /^\d+_(.+)_[-\d\w]+$/;
function fromIndexKey(s:string):string {
const m = s.match(_re_indexKey);
if (m === null) {
throw 'Invalid index key';
}
return m[1];
}
class Dispatcher {
idx: Array<string>
syncer: PubSub
store: Store
constructor(store:Store, syncer:PubSub) {
this.idx = new Array<string>()
this.syncer = syncer;
this.store = store;
}
public isDirty(): boolean {
return this.idx.length > 0;
}
public add(id:string, item:Syncable): string {
const v = item.toJSON();
const k = toIndexKey(id);
this.store.put(k, v, true);
localStorage.setItem(k, v);
this.idx.push(k);
return k;
}
public sync(offset:number): number {
let i = 0;
this.idx.forEach((k) => {
const v = localStorage.getItem(k);
const k_id = fromIndexKey(k);
this.syncer.pub(v); // this must block until guaranteed delivery
localStorage.removeItem(k);
i++;
});
return i;
}
}
export { Dispatcher, toIndexKey, fromIndexKey }

View File

@ -1,5 +0,0 @@
interface JSONSerializable {
toJSON(): string
}
export { JSONSerializable };

View File

@ -1,5 +1,2 @@
export { PGPSigner, PGPKeyStore, Signer, KeyStore } from './auth'; export { User } from './user';
export { ArgPair, Envelope, Syncable } from './sync'; export { Phone } from './phone';
export { User } from './assets/user';
export { Phone } from './assets/phone';
export { Config } from './config';

View File

@ -1,5 +1,4 @@
import { ArgPair, Syncable } from '../sync'; import { Syncable, Addressable, mergeKey } from 'crdt-meta';
import { Addressable, mergeKey } from '../digest';
class Phone extends Syncable implements Addressable { class Phone extends Syncable implements Addressable {

View File

@ -1,9 +0,0 @@
import { Syncable } from './sync';
interface Store {
put(string, Syncable, boolean?)
get(string):Syncable
delete(string)
}
export { Store };

View File

@ -1,266 +0,0 @@
import * as Automerge from 'automerge';
import { JSONSerializable } from './format';
import { Authoritative, Signer, PGPSigner, Signable, Signature } from './auth';
import { engineSpec, cryptoSpec, networkSpec, VersionedSpec } from './constants';
const fullSpec:VersionedSpec = {
name: 'cic',
version: '1',
ext: {
network: cryptoSpec,
engine: engineSpec,
},
}
class Envelope {
o = fullSpec
constructor(payload:Object) {
this.set(payload);
}
public set(payload:Object) {
this.o['payload'] = payload
}
public get():string {
return this.o['payload'];
}
public toJSON() {
return JSON.stringify(this.o);
}
public static fromJSON(s:string): Envelope {
const e = new Envelope(undefined);
e.o = JSON.parse(s);
return e;
}
public unwrap(): Syncable {
return Syncable.fromJSON(this.o['payload']);
}
}
class ArgPair {
k:string
v:any
constructor(k:string, v:any) {
this.k = k;
this.v = v;
}
}
class SignablePart implements Signable {
s: string
constructor(s:string) {
this.s = s;
}
public digest():string {
return this.s;
}
}
function orderDict(src) {
let dst;
if (Array.isArray(src)) {
dst = [];
src.forEach((v) => {
if (typeof(v) == 'object') {
v = orderDict(v);
}
dst.push(v);
});
} else {
dst = {}
Object.keys(src).sort().forEach((k) => {
let v = src[k];
if (typeof(v) == 'object') {
v = orderDict(v);
}
dst[k] = v;
});
}
return dst;
}
class Syncable implements JSONSerializable, Authoritative, Signable {
id: string
timestamp: number
m: any // automerge object
e: Envelope
signer: Signer
onwrap: (string) => void
onauthenticate: (boolean) => void
// TODO: Move data to sub-object so timestamp, id, signature don't collide
constructor(id:string, v:Object) {
this.id = id;
const o = {
'id': id,
'timestamp': Math.floor(Date.now() / 1000),
'data': v,
}
//this.m = Automerge.from(v)
this.m = Automerge.from(o)
}
public setSigner(signer:Signer) {
this.signer = signer;
this.signer.onsign = (s) => {
this.wrap(s);
};
}
// TODO: To keep integrity, the non-link key/value pairs for each step also need to be hashed
public digest(): string {
const links = [];
Automerge.getAllChanges(this.m).forEach((ch:Object) => {
const op:Array<any> = ch['ops'];
ch['ops'].forEach((op:Array<Object>) => {
if (op['action'] == 'link') {
//console.log('op link', op);
links.push([op['obj'], op['value']]);
}
});
});
//return JSON.stringify(links);
const j = JSON.stringify(links);
return Buffer.from(j).toString('base64');
}
private wrap(s:any) {
this.m = Automerge.change(this.m, 'sign', (doc) => {
doc['signature'] = s;
});
this.e = new Envelope(this.toJSON());
console.log('wrappin s', s, typeof(s));
this.e.o['digest'] = s.digest;
if (this.onwrap !== undefined) {
this.onwrap(this.e);
}
}
// private _verifyLoop(i:number, history:Array<any>, signable:Signable, result:boolean) {
// if (!result) {
// this.onauthenticate(false);
// return;
// } else if (history.length == 0) {
// this.onauthenticate(true);
// return;
// }
// const h = history.shift()
// if (i % 2 == 0) {
// i++;
// signable = {
// digest: () => {
// return Automerge.save(h.snapshot)
// },
// };
// this._verifyLoop(i, history, signable, true);
// } else {
// i++;
// const signature = h.snapshot['signature'];
// console.debug('signature', signature, signable.digest());
// this.signer.onverify = (v) => {
// this._verifyLoop(i, history, signable, v)
// }
// this.signer.verify(signable, signature);
// }
// }
//
// // TODO: This should replay the graph and check signatures on each step
// public _authenticate(full:boolean=false) {
// let h = Automerge.getHistory(this.m);
// h.forEach((m) => {
// //console.debug(m.snapshot);
// });
// const signable = {
// digest: () => { return '' },
// }
// if (!full) {
// h = h.slice(h.length-2);
// }
// this._verifyLoop(0, h, signable, true);
// }
public authenticate(full:boolean=false) {
if (full) {
console.warn('only doing shallow authentication for now, sorry');
}
//console.log('authenticating', signable.digest());
//console.log('signature', this.m.signature);
this.signer.onverify = (v) => {
//this._verifyLoop(i, history, signable, v)
this.onauthenticate(v);
}
this.signer.verify(this.m.signature.digest, this.m.signature);
}
public sign() {
//this.signer.prepare(this);
this.signer.sign(this.digest());
}
public update(changes:Array<ArgPair>, changesDescription:string) {
this.m = Automerge.change(this.m, changesDescription, (m) => {
changes.forEach((c) => {
let path = c.k.split('.');
let target = m['data'];
while (path.length > 1) {
const part = path.shift();
target = target[part];
}
target[path[0]] = c.v;
});
m['timestamp'] = Math.floor(Date.now() / 1000);
});
}
public replace(o:Object, changesDescription:string) {
this.m = Automerge.change(this.m, changesDescription, (m) => {
Object.keys(o).forEach((k) => {
m['data'][k] = o[k];
});
Object.keys(m).forEach((k) => {
if (o[k] == undefined) {
delete m['data'][k];
}
});
m['timestamp'] = Math.floor(Date.now() / 1000);
});
}
public merge(s:Syncable) {
this.m = Automerge.merge(s.m, this.m);
}
public toJSON(): string {
const s = Automerge.save(this.m);
const o = JSON.parse(s);
const oo = orderDict(o)
return JSON.stringify(oo);
}
public static fromJSON(s:string): Syncable {
const doc = Automerge.load(s);
let y = new Syncable(doc['id'], {});
y.m = doc
return y
}
}
export { JSONSerializable, Syncable, ArgPair, Envelope };

View File

@ -1,11 +0,0 @@
interface SubConsumer {
post(string)
}
interface PubSub {
pub(v:string):boolean
close()
}
export { PubSub, SubConsumer };

View File

@ -1,5 +1,4 @@
import { ArgPair, Syncable } from '../sync'; import { Syncable, Addressable, toAddressKey } from 'crdt-meta';
import { Addressable, addressToBytes, bytesToHex, toAddressKey } from '../digest';
const keySalt = new TextEncoder().encode(':cic.person'); const keySalt = new TextEncoder().encode(':cic.person');
class User extends Syncable implements Addressable { class User extends Syncable implements Addressable {

View File

@ -1,50 +0,0 @@
import * as Automerge from 'automerge';
import assert = require('assert');
import { Dispatcher, toIndexKey, fromIndexKey } from '../src/dispatch';
import { User } from '../src/assets/user';
import { Syncable, ArgPair } from '../src/sync';
import { MockSigner, MockStore } from './mock';
describe('basic', () => {
it('store', () => {
const store = new MockStore('s');
assert.equal(store.name, 's');
const mockSigner = new MockSigner();
const v = new Syncable('foo', {baz: 42});
v.setSigner(mockSigner);
store.put('foo', v);
const one = store.get('foo').toJSON();
const vv = new Syncable('bar', {baz: 666});
vv.setSigner(mockSigner);
assert.throws(() => {
store.put('foo', vv)
});
store.put('foo', vv, true);
const other = store.get('foo').toJSON();
assert.notEqual(one, other);
store.delete('foo');
assert.equal(store.get('foo'), undefined);
});
it('add_doc_to_dispatcher', () => {
const store = new MockStore('s');
//const syncer = new MockSyncer();
const dispatcher = new Dispatcher(store, undefined);
const user = new User('foo');
dispatcher.add(user.id, user);
assert(dispatcher.isDirty());
});
it('dispatch_keyindex', () => {
const s = 'foo';
const k = toIndexKey(s);
const v = fromIndexKey(k);
assert.equal(s, v);
});
});

View File

@ -1,212 +0,0 @@
import * as Automerge from 'automerge';
import assert = require('assert');
import * as pgp from 'openpgp';
import * as fs from 'fs';
import { PGPSigner } from '../src/auth';
import { Syncable, ArgPair } from '../src/sync';
import { MockKeyStore, MockSigner } from './mock';
describe('sync', async () => {
it('sync_merge', () => {
const mockSigner = new MockSigner();
const s = new Syncable('foo', {
bar: 'baz',
});
s.setSigner(mockSigner);
const changePair = new ArgPair('xyzzy', 42);
s.update([changePair], 'ch-ch-cha-changes');
assert.equal(s.m.data['xyzzy'], 42)
assert.equal(s.m.data['bar'], 'baz')
assert.equal(s.m['id'], 'foo')
assert.equal(Automerge.getHistory(s.m).length, 2);
});
it('sync_serialize', () => {
const mockSigner = new MockSigner();
const s = new Syncable('foo', {
bar: 'baz',
});
s.setSigner(mockSigner);
const j = s.toJSON();
const ss = Syncable.fromJSON(j);
assert.equal(ss.m['id'], 'foo');
assert.equal(ss.m['data']['bar'], 'baz');
assert.equal(Automerge.getHistory(ss.m).length, 1);
});
it('sync_sign_and_wrap', () => {
const mockSigner = new MockSigner();
const s = new Syncable('foo', {
bar: 'baz',
});
s.setSigner(mockSigner);
s.onwrap = (e) => {
const j = e.toJSON();
const v = JSON.parse(j);
assert.deepEqual(v.payload, e.o.payload);
}
s.sign();
});
it('sync_verify_success', async () => {
const pksa = fs.readFileSync(__dirname + '/privatekeys.asc');
const pks = await pgp.key.readArmored(pksa);
await pks.keys[0].decrypt('merman');
await pks.keys[1].decrypt('beastman');
const pubksa = fs.readFileSync(__dirname + '/publickeys.asc');
const pubks = await pgp.key.readArmored(pubksa);
const oneStore = new MockKeyStore(pks.keys[0], pubks.keys);
const twoStore = new MockKeyStore(pks.keys[1], pubks.keys);
const threeStore = new MockKeyStore(pks.keys[2], [pubks.keys[0], pubks.keys[2]]);
const oneSigner = new PGPSigner(oneStore);
const twoSigner = new PGPSigner(twoStore);
const threeSigner = new PGPSigner(threeStore);
const x = new Syncable('foo', {
bar: 'baz',
});
x.setSigner(oneSigner);
// TODO: make this look better
x.onwrap = (e) => {
let updateData = new ArgPair('bar', 'xyzzy');
x.update([updateData], 'change one');
x.onwrap = (e) => {
x.setSigner(twoSigner);
updateData = new ArgPair('bar', 42);
x.update([updateData], 'change two');
x.onwrap = (e) => {
const p = e.unwrap();
p.setSigner(twoSigner);
p.onauthenticate = (v) => {
assert(v);
}
p.authenticate();
}
x.sign();
};
x.sign();
}
x.sign();
});
it('sync_verify_fail', async () => {
const pksa = fs.readFileSync(__dirname + '/privatekeys.asc');
const pks = await pgp.key.readArmored(pksa);
await pks.keys[0].decrypt('merman');
await pks.keys[1].decrypt('beastman');
const pubksa = fs.readFileSync(__dirname + '/publickeys.asc');
const pubks = await pgp.key.readArmored(pubksa);
const oneStore = new MockKeyStore(pks.keys[0], pubks.keys);
const twoStore = new MockKeyStore(pks.keys[1], pubks.keys);
const threeStore = new MockKeyStore(pks.keys[2], [pubks.keys[0], pubks.keys[2]]);
const oneSigner = new PGPSigner(oneStore);
const twoSigner = new PGPSigner(twoStore);
const threeSigner = new PGPSigner(threeStore);
const x = new Syncable('foo', {
bar: 'baz',
});
x.setSigner(oneSigner);
// TODO: make this look better
x.onwrap = (e) => {
let updateData = new ArgPair('bar', 'xyzzy');
x.update([updateData], 'change one');
x.onwrap = (e) => {
x.setSigner(twoSigner);
updateData = new ArgPair('bar', 42);
x.update([updateData], 'change two');
x.onwrap = (e) => {
const p = e.unwrap();
p.setSigner(threeSigner);
p.onauthenticate = (v) => {
assert(!v);
}
p.authenticate();
}
x.sign();
};
x.sign();
}
x.sign();
});
xit('sync_verify_shallow_tricked', async () => {
const pksa = fs.readFileSync(__dirname + '/privatekeys.asc');
const pks = await pgp.key.readArmored(pksa);
await pks.keys[0].decrypt('merman');
await pks.keys[1].decrypt('beastman');
const pubksa = fs.readFileSync(__dirname + '/publickeys.asc');
const pubks = await pgp.key.readArmored(pubksa);
const oneStore = new MockKeyStore(pks.keys[0], pubks.keys);
const twoStore = new MockKeyStore(pks.keys[1], pubks.keys);
const threeStore = new MockKeyStore(pks.keys[2], [pubks.keys[0], pubks.keys[2]]);
const oneSigner = new PGPSigner(oneStore);
const twoSigner = new PGPSigner(twoStore);
const threeSigner = new PGPSigner(threeStore);
const x = new Syncable('foo', {
bar: 'baz',
});
x.setSigner(twoSigner);
// TODO: make this look better
x.onwrap = (e) => {
let updateData = new ArgPair('bar', 'xyzzy');
x.update([updateData], 'change one');
x.onwrap = (e) => {
updateData = new ArgPair('bar', 42);
x.update([updateData], 'change two');
x.setSigner(oneSigner);
x.onwrap = (e) => {
const p = e.unwrap();
p.setSigner(threeSigner);
p.onauthenticate = (v) => {
assert(v);
p.onauthenticate = (v) => {
assert(!v);
}
p.authenticate(true);
}
p.authenticate();
}
x.sign();
};
x.sign();
}
x.sign();
});
});

View File

@ -1,14 +0,0 @@
import * as assert from 'assert';
import { MockPubSub, MockConsumer } from './mock';
describe('transport', () => {
it('pub_sub', () => {
const c = new MockConsumer();
const ps = new MockPubSub('foo', c);
ps.pub('foo');
ps.pub('bar');
ps.flush();
assert.deepEqual(c.omnoms, ['foo', 'bar']);
});
});

View File

@ -1,46 +0,0 @@
import assert = require('assert');
import pgp = require('openpgp');
import crypto = require('crypto');
import { Syncable, ArgPair } from '../src/sync';
import { MockKeyStore, MockSignable } from './mock';
import { PGPSigner } from '../src/auth';
describe('auth', async () => {
await it('digest', async () => {
const opts = {
userIds: [
{
name: 'John Marston',
email: 'red@dead.com',
},
],
numBits: 2048,
passphrase: 'foo',
};
const pkgen = await pgp.generateKey(opts);
const pka = pkgen.privateKeyArmored;
const pks = await pgp.key.readArmored(pka);
await pks.keys[0].decrypt('foo');
const pubka = pkgen.publicKeyArmored;
const pubks = await pgp.key.readArmored(pubka);
const keyStore = new MockKeyStore(pks.keys[0], pubks.keys);
const s = new PGPSigner(keyStore);
const message = await pgp.cleartext.fromText('foo');
s.onverify = (ok) => {
assert(ok);
}
s.onsign = (signature) => {
s.onverify((v) => {
console.log('bar', v);
});
s.verify('foo', signature);
}
await s.sign('foo');
});
});

View File

@ -1,47 +0,0 @@
import * as assert from 'assert';
import * as pgp from 'openpgp';
import { Dispatcher } from '../src/dispatch';
import { User } from '../src/assets/user';
import { PGPSigner, KeyStore } from '../src/auth';
import { SubConsumer } from '../src/transport';
import { MockStore, MockPubSub, MockConsumer, MockKeyStore } from './mock';
async function createKeyStore() {
const opts = {
userIds: [
{
name: 'John Marston',
email: 'red@dead.com',
},
],
numBits: 2048,
passphrase: 'foo',
};
const pkgen = await pgp.generateKey(opts);
const pka = pkgen.privateKeyArmored;
const pks = await pgp.key.readArmored(pka);
await pks.keys[0].decrypt('foo');
return new MockKeyStore(pks.keys[0], []);
}
describe('fullchain', async () => {
it('dispatch_and_publish_user', async () => {
const g = await createKeyStore();
const n = new PGPSigner(g);
const u = new User('u1', {});
u.setSigner(n);
u.setName('Nico', 'Bellic');
const s = new MockStore('fooStore');
const c = new MockConsumer();
const p = new MockPubSub('fooPubSub', c);
const d = new Dispatcher(s, p);
u.onwrap = (e) => {
d.add(u.id, e);
d.sync(0);
assert.equal(p.pubs.length, 1);
};
u.sign();
});
});

View File

@ -1,150 +0,0 @@
import * as crypto from 'crypto';
import { Signable, Signature, KeyStore } from '../src/auth';
import { Store } from '../src/store';
import { PubSub, SubConsumer } from '../src/transport';
import { Syncable } from '../src/sync';
class MockStore implements Store {
contents: Object
name: string
constructor(name:string) {
this.name = name;
this.contents = {};
}
public put(k:string, v:Syncable, existsOk = false) {
if (!existsOk && this.contents[k] !== undefined) {
throw '"' + k + '" already exists in store ' + this.name;
}
this.contents[k] = v;
}
public get(k:string): Syncable {
return this.contents[k];
}
public delete(k:string) {
delete this.contents[k];
}
}
class MockSigner {
onsign: (string) => void
onverify: (boolean) => void
public verify(src:string, signature:Signature) {
return true;
}
public sign(s:string):boolean {
this.onsign('there would be a signature here');
return true;
}
public prepare(m:Signable):boolean {
return true;
}
public fingerprint():string {
return '';
}
}
class MockConsumer implements SubConsumer {
omnoms: Array<string>
constructor() {
this.omnoms = Array<string>();
}
public post(v:string) {
this.omnoms.push(v);
}
}
class MockPubSub implements PubSub {
pubs: Array<string>
consumer: SubConsumer
constructor(name:string, consumer:SubConsumer) {
this.pubs = Array<string>();
this.consumer = consumer;
}
public pub(v:string): boolean {
this.pubs.push(v);
return true;
}
public flush() {
while (this.pubs.length > 0) {
const s = this.pubs.shift();
this.consumer.post(s);
}
}
public close() {
}
}
class MockSignable implements Signable {
src: string
dst: string
constructor(src:string) {
this.src = src;
}
public digest():string {
const h = crypto.createHash('sha256');
h.update(this.src);
this.dst= h.digest('hex');
return this.dst;
}
}
class MockKeyStore implements KeyStore {
pk: any
pubks: Array<any>
constructor(pk:any, pubks:Array<any>) {
this.pk = pk;
this.pubks = pubks;
}
public getPrivateKey(): any {
return this.pk;
}
public getTrustedKeys(): Array<any> {
return this.pubks;
}
public getTrustedActiveKeys(): Array<any> {
return [];
}
public getEncryptKeys(): Array<any> {
return [];
}
public getFingerprint(): string {
return '';
}
}
export {
MockStore,
MockPubSub,
MockConsumer,
MockSignable,
MockKeyStore,
MockSigner,
};

View File

@ -1,13 +1,10 @@
import Automerge = require('automerge');
import assert = require('assert'); import assert = require('assert');
import fs = require('fs'); import fs = require('fs');
import pgp = require('openpgp'); import pgp = require('openpgp');
import sqlite = require('sqlite3'); import sqlite = require('sqlite3');
import * as handlers from '../scripts/server/handlers'; import * as handlers from '../scripts/server/handlers';
import { Envelope, Syncable, ArgPair } from '../src/sync'; import { Envelope, Syncable, ArgPair, PGPKeyStore, PGPSigner, KeyStore, Signer, SqliteAdapter } from 'crdt-meta';
import { PGPKeyStore, PGPSigner, KeyStore, Signer } from '../src/auth';
import { SqliteAdapter } from '../src/db';
function createKeystore() { function createKeystore() {
const pksa = fs.readFileSync(__dirname + '/privatekeys.asc', 'utf-8'); const pksa = fs.readFileSync(__dirname + '/privatekeys.asc', 'utf-8');

View File

@ -1,4 +1,4 @@
cic_base[full_graph]~=0.1.2a68 cic_base[full_graph]~=0.1.2b2
cic-eth~=0.11.0b3 cic-eth~=0.11.0b9
cic-notify~=0.4.0a4 cic-notify~=0.4.0a4
cic-types~=0.1.0a10 cic-types~=0.1.0a10

View File

@ -4,9 +4,13 @@ FROM python:3.8.6-slim-buster as compile-image
RUN apt-get update RUN apt-get update
RUN apt-get install -y --no-install-recommends git gcc g++ libpq-dev gawk jq telnet wget openssl iputils-ping gnupg socat bash procps make python2 cargo RUN apt-get install -y --no-install-recommends git gcc g++ libpq-dev gawk jq telnet wget openssl iputils-ping gnupg socat bash procps make python2 cargo
RUN apt-get install -y software-properties-common RUN touch /etc/apt/sources.list.d/ethereum.list
RUN add-apt-repository ppa:ethereum/ethereum RUN echo 'deb http://ppa.launchpad.net/ethereum/ethereum/ubuntu bionic main' > /etc/apt/sources.list.d/ethereum.list
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 1C52189C923F6CA9 RUN echo 'deb-src http://ppa.launchpad.net/ethereum/ethereum/ubuntu bionic main' >> /etc/apt/sources.list.d/ethereum.list
RUN cat /etc/apt/sources.list.d/ethereum.list
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 2A518C819BE37D2C2031944D1C52189C923F6CA9
RUN apt-get update RUN apt-get update
RUN apt-get install solc RUN apt-get install solc
RUN pip install --upgrade pip RUN pip install --upgrade pip
@ -57,19 +61,22 @@ WORKDIR /home/grassroots
USER grassroots USER grassroots
ARG pip_extra_index_url=https://pip.grassrootseconomics.net:8433 ARG pip_extra_index_url=https://pip.grassrootseconomics.net:8433
ARG cic_base_version=0.1.2a77 ARG cic_base_version=0.1.2a79
ARG cic_eth_version=0.11.0b6 ARG cic_eth_version=0.11.0b8+build.c2286e5c
ARG sarafu_faucet_version=0.0.2a28 ARG sarafu_faucet_version=0.0.2a28
ARG sarafu_token_version==0.0.1a6
ARG cic_contracts_version=0.0.2a2 ARG cic_contracts_version=0.0.2a2
RUN pip install --user --extra-index-url $pip_extra_index_url cic-base[full_graph]==$cic_base_version \ RUN pip install --user --extra-index-url $pip_extra_index_url cic-base[full_graph]==$cic_base_version \
cic-eth==$cic_eth_version \ cic-eth==$cic_eth_version \
cic-contracts==$cic_contracts_version \ cic-contracts==$cic_contracts_version \
sarafu-faucet==$sarafu_faucet_version sarafu-faucet==$sarafu_faucet_version \
sarafu-token==$sarafu_token_version
FROM python:3.8.6-slim-buster as runtime-image FROM python:3.8.6-slim-buster as runtime-image
RUN apt-get update RUN apt-get update
RUN apt-get install -y --no-install-recommends gnupg libpq-dev RUN apt-get install -y --no-install-recommends gnupg libpq-dev
RUN apt-get install -y --no-install-recommends jq
COPY --from=compile-image /usr/local/bin/ /usr/local/bin/ COPY --from=compile-image /usr/local/bin/ /usr/local/bin/
COPY --from=compile-image /usr/local/etc/cic/ /usr/local/etc/cic/ COPY --from=compile-image /usr/local/etc/cic/ /usr/local/etc/cic/

View File

@ -2,82 +2,111 @@
set -a set -a
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C CIC_CHAIN_SPEC=${CIC_CHAIN_SPEC:-evm:bloxberg:8995}
CIC_DEFAULT_TOKEN_SYMBOL=${CIC_DEFAULT_TOKEN_SYMBOL:-GFT}
DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER} DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER} DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
DEV_RESERVE_AMOUNT=${DEV_ETH_RESERVE_AMOUNT:-""10000000000000000000000000000000000} DEV_RESERVE_AMOUNT=${DEV_ETH_RESERVE_AMOUNT:-""10000000000000000000000000000000000}
faucet_amount=${DEV_FAUCET_AMOUNT:-0} DEV_FAUCET_AMOUNT=${DEV_FAUCET_AMOUNT:-0}
keystore_file=$(realpath ./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c) DEV_ETH_KEYSTORE_FILE=${DEV_ETH_KEYSTORE_FILE:-`realpath ./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c`}
set -e
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-checksum $(cat $DEV_ETH_KEYSTORE_FILE | jq -r .address)`
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
>&2 echo using static gas price $DEV_ETH_GAS_PRICE
fi
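# gas_price_arg is appended to the deploy and registry commands below whenever a static gas price is configured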
if [[ $CIC_DEFAULT_TOKEN_SYMBOL != 'GFT' && $CIC_DEFAULT_TOKEN_SYMBOL != 'SRF' ]]; then
>&2 echo CIC_DEFAULT_TOKEN_SYMBOL must be one of [GFT,SRF], but was $CIC_DEFAULT_TOKEN_SYMBOL
exit 1
fi
echo "environment:" echo "environment:"
printenv printenv
echo \n echo \n
echo "using wallet address '$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER' from keystore file $DEV_ETH_KEYSTORE_FILE"
# This is a grassroots team convention for building the Bancor contracts using the bancor protocol repository truffle setup # This is a grassroots team convention for building the Bancor contracts using the bancor protocol repository truffle setup
# Running this in docker-internal dev container (built from Docker folder in this repo) will write a # Running this in docker-internal dev container (built from Docker folder in this repo) will write a
# source-able env file to CIC_DATA_DIR. Services dependent on these contracts can mount this file OR # source-able env file to CIC_DATA_DIR. Services dependent on these contracts can mount this file OR
# define these parameters at runtime # define these parameters at runtime
# pushd /usr/src # pushd /usr/src
if [ -z $CIC_DATA_DIR ]; then
CIC_DATA_DIR=`mktemp -d`
fi
>&2 echo using data dir $CIC_DATA_DIR
init_level_file=${CIC_DATA_DIR}/.init init_level_file=${CIC_DATA_DIR}/.init
if [ ! -f ${CIC_DATA_DIR}/.init ]; then if [ ! -f ${CIC_DATA_DIR}/.init ]; then
echo "Creating .init file..." echo "Creating .init file..."
mkdir -p $CIC_DATA_DIR mkdir -p $CIC_DATA_DIR
touch /tmp/cic/config/.init touch $CIC_DATA_DIR/.init
# touch $init_level_file # touch $init_level_file
fi fi
echo -n 1 > $init_level_file echo -n 1 > $init_level_file
# Abort on any error (including if wait-for-it fails). # Abort on any error (including if wait-for-it fails).
set -e
# Wait for the backend to be up, if we know where it is. # Wait for the backend to be up, if we know where it is.
if [[ -n "${ETH_PROVIDER}" ]]; then if [[ -n "${ETH_PROVIDER}" ]]; then
echo "waiting for ${ETH_PROVIDER}..."
./wait-for-it.sh "${ETH_PROVIDER_HOST}:${ETH_PROVIDER_PORT}"
DEV_RESERVE_ADDRESS=`giftable-token-deploy -p $ETH_PROVIDER -y $keystore_file -i $CIC_CHAIN_SPEC -v -w --name "Sarafu" --symbol "SRF" --decimals 6` if [ ! -z "$DEV_USE_DOCKER_WAIT_SCRIPT" ]; then
giftable-token-gift -p $ETH_PROVIDER -y $keystore_file -i $CIC_CHAIN_SPEC -v -w -a $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT echo "waiting for ${ETH_PROVIDER}..."
./wait-for-it.sh "${ETH_PROVIDER_HOST}:${ETH_PROVIDER_PORT}"
fi
#BANCOR_REGISTRY_ADDRESS=`cic-bancor-deploy --bancor-dir /usr/local/share/cic/bancor -z $DEV_ETH_RESERVE_ADDRESS -p $ETH_PROVIDER -o $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER` if [ $CIC_DEFAULT_TOKEN_SYMBOL == 'GFT' ]; then
>&2 echo "deploying 'giftable token'"
DEV_RESERVE_ADDRESS=`giftable-token-deploy $gas_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -w --name "Giftable Token" --symbol "GFT" --decimals 6 -vv`
else
>&2 echo "deploying 'sarafu' token'"
DEV_RESERVE_ADDRESS=`sarafu-token-deploy $gas_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -w --name "Sarafu" --decimals 6 -vv SRF $DEV_SARAFU_DEMURRAGE_LEVEL`
fi
giftable-token-gift $gas_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -w -a $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT
#BANCOR_REGISTRY_ADDRESS=`cic-bancor-deploy $gas_price_arg --bancor-dir /usr/local/share/cic/bancor -z $DEV_ETH_RESERVE_ADDRESS -p $ETH_PROVIDER -o $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER`
>&2 echo "deploy account index contract" >&2 echo "deploy account index contract"
DEV_ACCOUNT_INDEX_ADDRESS=`eth-accounts-index-deploy -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -y $keystore_file -vv -w` DEV_ACCOUNT_INDEX_ADDRESS=`eth-accounts-index-deploy $gas_price_arg -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -vv -w`
>&2 echo "add deployer address as account index writer" >&2 echo "add deployer address as account index writer"
eth-accounts-index-writer -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_ACCOUNT_INDEX_ADDRESS -ww $debug $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER eth-accounts-index-writer $gas_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_ACCOUNT_INDEX_ADDRESS -ww -vv $debug $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
CIC_REGISTRY_ADDRESS=`eth-contract-registry-deploy -i $CIC_CHAIN_SPEC -y $keystore_file --identifier BancorRegistry --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization -p $ETH_PROVIDER -vv -w` CIC_REGISTRY_ADDRESS=`eth-contract-registry-deploy $gas_price_arg -i $CIC_CHAIN_SPEC -y $DEV_ETH_KEYSTORE_FILE --identifier BancorRegistry --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization -p $ETH_PROVIDER -vv -w`
eth-contract-registry-set -w -y $keystore_file -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv ContractRegistry $CIC_REGISTRY_ADDRESS eth-contract-registry-set $gas_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv ContractRegistry $CIC_REGISTRY_ADDRESS
#cic-registry-set -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -k BancorRegistry -p $ETH_PROVIDER $BANCOR_REGISTRY_ADDRESS -vv eth-contract-registry-set $gas_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv AccountRegistry $DEV_ACCOUNT_INDEX_ADDRESS
eth-contract-registry-set -w -y $keystore_file -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv AccountRegistry $DEV_ACCOUNT_INDEX_ADDRESS
# Deploy address declarator registry # Deploy address declarator registry
>&2 echo "deploy address declarator contract" >&2 echo "deploy address declarator contract"
declarator_description=0x546869732069732074686520434943206e6574776f726b000000000000000000 declarator_description=0x546869732069732074686520434943206e6574776f726b000000000000000000
DEV_DECLARATOR_ADDRESS=`eth-address-declarator-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -v $declarator_description` DEV_DECLARATOR_ADDRESS=`eth-address-declarator-deploy -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv $declarator_description`
eth-contract-registry-set -w -y $keystore_file -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv AddressDeclarator $DEV_DECLARATOR_ADDRESS eth-contract-registry-set $gas_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv AddressDeclarator $DEV_DECLARATOR_ADDRESS
# Deploy transfer authorization contact # Deploy transfer authorization contact
>&2 echo "deploy address declarator contract" >&2 echo "deploy address declarator contract"
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -v` DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy $gas_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv`
eth-contract-registry-set -w -y $keystore_file -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS eth-contract-registry-set $gas_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS
# Deploy token index contract # Deploy token index contract
>&2 echo "deploy token index contract" >&2 echo "deploy token index contract"
DEV_TOKEN_INDEX_ADDRESS=`eth-token-index-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -v` DEV_TOKEN_INDEX_ADDRESS=`eth-token-index-deploy $gas_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv`
eth-contract-registry-set -w -y $keystore_file -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv TokenRegistry $DEV_TOKEN_INDEX_ADDRESS eth-contract-registry-set $gas_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv TokenRegistry $DEV_TOKEN_INDEX_ADDRESS
>&2 echo "add reserve token to token index" >&2 echo "add reserve token to token index"
eth-token-index-add -w -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv -a $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS eth-token-index-add $gas_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv -a $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS
# Sarafu faucet contract # Sarafu faucet contract
>&2 echo "deploy token faucet contract" >&2 echo "deploy token faucet contract"
DEV_FAUCET_ADDRESS=`sarafu-faucet-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -v --account-index-address $DEV_ACCOUNT_INDEX_ADDRESS $DEV_RESERVE_ADDRESS` DEV_FAUCET_ADDRESS=`sarafu-faucet-deploy $gas_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv --account-index-address $DEV_ACCOUNT_INDEX_ADDRESS $DEV_RESERVE_ADDRESS`
eth-contract-registry-set -w -y $keystore_file -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv Faucet $DEV_FAUCET_ADDRESS eth-contract-registry-set $gas_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv Faucet $DEV_FAUCET_ADDRESS
>&2 echo "set faucet as token minter" >&2 echo "set faucet as token minter"
giftable-token-minter -w -y $keystore_file -a $DEV_RESERVE_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv $DEV_FAUCET_ADDRESS giftable-token-minter $gas_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -a $DEV_RESERVE_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv $DEV_FAUCET_ADDRESS
>&2 echo "set token faucet amount" >&2 echo "set token faucet amount"
sarafu-faucet-set -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_FAUCET_ADDRESS $faucet_amount sarafu-faucet-set $gas_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_FAUCET_ADDRESS -vv $DEV_FAUCET_AMOUNT
else else

View File

@ -32,6 +32,7 @@ default_config_dir = '/usr/local/etc/cic'
argparser = argparse.ArgumentParser() argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=default_config_dir, help='config file') argparser.add_argument('-c', type=str, default=default_config_dir, help='config file')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string') argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string')
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission') argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission') argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback') argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
@ -76,6 +77,11 @@ os.makedirs(user_new_dir)
meta_dir = os.path.join(args.user_dir, 'meta') meta_dir = os.path.join(args.user_dir, 'meta')
os.makedirs(meta_dir) os.makedirs(meta_dir)
custom_dir = os.path.join(args.user_dir, 'custom')
os.makedirs(custom_dir)
os.makedirs(os.path.join(custom_dir, 'new'))
os.makedirs(os.path.join(custom_dir, 'meta'))
phone_dir = os.path.join(args.user_dir, 'phone') phone_dir = os.path.join(args.user_dir, 'phone')
os.makedirs(os.path.join(phone_dir, 'meta')) os.makedirs(os.path.join(phone_dir, 'meta'))
@ -85,6 +91,11 @@ os.stat(user_old_dir)
txs_dir = os.path.join(args.user_dir, 'txs') txs_dir = os.path.join(args.user_dir, 'txs')
os.makedirs(txs_dir) os.makedirs(txs_dir)
user_dir = args.user_dir
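# identities in the user export are keyed by the old chain spec, so it is parsed alongside the current one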
old_chain_spec = ChainSpec.from_chain_str(args.old_chain_spec)
old_chain_str = str(old_chain_spec)
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
chain_str = str(chain_spec) chain_str = str(chain_spec)
@ -133,7 +144,17 @@ def register_eth(i, u):
if __name__ == '__main__': if __name__ == '__main__':
#fi = open(os.path.join(user_out_dir, 'addresses.csv'), 'a') user_tags = {}
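# tags.csv is expected to hold one '<old_address>:<tag>,<tag>,...' entry per line (format inferred from the parsing below)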
f = open(os.path.join(user_dir, 'tags.csv'), 'r')
while True:
r = f.readline().rstrip()
if len(r) == 0:
break
(old_address, tags_csv) = r.split(':')
old_address = strip_0x(old_address)
user_tags[old_address] = tags_csv.split(',')
logg.debug('read tags {} for old address {}'.format(user_tags[old_address], old_address))
i = 0 i = 0
j = 0 j = 0
@ -198,6 +219,29 @@ if __name__ == '__main__':
os.symlink(os.path.realpath(filepath), meta_phone_filepath) os.symlink(os.path.realpath(filepath), meta_phone_filepath)
# custom data
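# the pointer is derived from the phone number salted with ':cic.custom'; the file is sharded under custom/new/<key[:2]>/<key[2:4]>/<key>.json and symlinked from custom/meta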
custom_key = generate_metadata_pointer(phone.encode('utf-8'), ':cic.custom')
custom_filepath = os.path.join(custom_dir, 'meta', custom_key)
filepath = os.path.join(
custom_dir,
'new',
custom_key[:2].upper(),
custom_key[2:4].upper(),
custom_key.upper() + '.json',
)
os.makedirs(os.path.dirname(filepath), exist_ok=True)
sub_old_chain_str = '{}:{}'.format(old_chain_spec.common_name(), old_chain_spec.network_id())
f = open(filepath, 'w')
k = u.identities['evm'][sub_old_chain_str][0]
tag_data = {'tags': user_tags[strip_0x(k)]}
f.write(json.dumps(tag_data))
f.close()
os.symlink(os.path.realpath(filepath), custom_filepath)
i += 1 i += 1
sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r") sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r")

View File

@ -3,10 +3,11 @@ const path = require('path');
const http = require('http'); const http = require('http');
const cic = require('cic-client-meta'); const cic = require('cic-client-meta');
const crdt = require('crdt-meta');
//const conf = JSON.parse(fs.readFileSync('./cic.conf')); //const conf = JSON.parse(fs.readFileSync('./cic.conf'));
const config = new cic.Config('./config'); const config = new crdt.Config('./config');
config.process(); config.process();
console.log(config); console.log(config);
@ -41,7 +42,7 @@ function sendit(uid, envelope) {
} }
function doOne(keystore, filePath) { function doOne(keystore, filePath) {
const signer = new cic.PGPSigner(keystore); const signer = new crdt.PGPSigner(keystore);
const parts = path.basename(filePath).split('.'); const parts = path.basename(filePath).split('.');
const ethereum_address = path.basename(parts[0]); const ethereum_address = path.basename(parts[0]);
@ -51,7 +52,7 @@ function doOne(keystore, filePath) {
//console.log(o); //console.log(o);
fs.unlinkSync(filePath); fs.unlinkSync(filePath);
const s = new cic.Syncable(uid, o); const s = new crdt.Syncable(uid, o);
s.setSigner(signer); s.setSigner(signer);
s.onwrap = (env) => { s.onwrap = (env) => {
sendit(uid, env); sendit(uid, env);
@ -65,7 +66,7 @@ const publicKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_P
pk = fs.readFileSync(privateKeyPath); pk = fs.readFileSync(privateKeyPath);
pubk = fs.readFileSync(publicKeyPath); pubk = fs.readFileSync(publicKeyPath);
new cic.PGPKeyStore( new crdt.PGPKeyStore(
config.get('PGP_PASSPHRASE'), config.get('PGP_PASSPHRASE'),
pk, pk,
pubk, pubk,

View File

@ -0,0 +1,139 @@
const fs = require('fs');
const path = require('path');
const http = require('http');
const cic = require('cic-client-meta');
const vcfp = require('vcard-parser');
const crdt = require('crdt-meta'); // provides Config, PGPKeyStore, PGPSigner and Syncable
//const conf = JSON.parse(fs.readFileSync('./cic.conf'));
const config = new crdt.Config('./config');
config.process();
console.log(config);
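// sendit PUTs the serialized envelope to META_URL/<uid>; X-CIC-AUTOMERGE: 'client' marks the payload as a client-side signed automerge document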
function sendit(uid, envelope) {
const d = envelope.toJSON();
const contentLength = (new TextEncoder().encode(d)).length;
const opts = {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
'Content-Length': contentLength,
'X-CIC-AUTOMERGE': 'client',
},
};
let url = config.get('META_URL');
url = url.replace(new RegExp('^(.+://[^/]+)/*$'), '$1/');
console.log('posting to url: ' + url + uid);
const req = http.request(url + uid, opts, (res) => {
res.on('data', process.stdout.write);
res.on('end', () => {
console.log('result', res.statusCode, res.headers);
});
});
if (!req.write(d)) {
console.error('foo', d);
process.exit(1);
}
req.end();
}
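// doOne reads a JSON metadata file, wraps it in a signed Syncable keyed by the given identifier and submits the resulting envelope via sendit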
function doOne(keystore, filePath, identifier) {
const signer = new crdt.PGPSigner(keystore);
const o = JSON.parse(fs.readFileSync(filePath).toString());
//const b = Buffer.from(j['vcard'], 'base64');
//const s = b.toString();
//const o = vcfp.parse(s);
//const phone = o.tel[0].value;
//cic.Phone.toKey(phone).then((uid) => {
//const o = fs.readFileSync(filePath, 'utf-8');
const s = new crdt.Syncable(identifier, o);
s.setSigner(signer);
s.onwrap = (env) => {
sendit(identifier, env);
};
s.sign();
//});
}
const privateKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_PRIVATE_KEY_FILE'));
const publicKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_PRIVATE_KEY_FILE'));
pk = fs.readFileSync(privateKeyPath);
pubk = fs.readFileSync(publicKeyPath);
new crdt.PGPKeyStore(
config.get('PGP_PASSPHRASE'),
pk,
pubk,
undefined,
undefined,
importMetaCustom,
);
const batchSize = 16;
const batchDelay = 1000;
const total = parseInt(process.argv[3]);
const dataDir = process.argv[2];
const workDir = path.join(dataDir, 'custom/meta');
const userDir = path.join(dataDir, 'custom/new');
let count = 0;
let batchCount = 0;
function importMetaCustom(keystore) {
let files;
try {
files = fs.readdirSync(workDir);
} catch {
console.error('source directory not yet ready', workDir);
setTimeout(importMetaCustom, batchDelay, keystore);
return;
}
let limit = batchSize;
if (files.length < limit) {
limit = files.length;
}
for (let i = 0; i < limit; i++) {
const file = files[i];
if (file.length < 3) {
console.debug('skipping file', file);
continue;
}
//const identifier = file.substr(0,file.length-5);
const identifier = file;
const filePath = path.join(workDir, file);
console.log(filePath);
//const address = fs.readFileSync(filePath).toString().substring(2).toUpperCase();
const custom = JSON.parse(fs.readFileSync(filePath).toString());
const customFilePath = path.join(
userDir,
identifier.substring(0, 2),
identifier.substring(2, 4),
identifier + '.json',
);
doOne(keystore, filePath, identifier);
fs.unlinkSync(filePath);
count++;
batchCount++;
if (batchCount == batchSize) {
console.debug('reached batch size, breathing');
batchCount=0;
setTimeout(importMetaCustom, batchDelay, keystore);
return;
}
}
if (count == total) {
return;
}
setTimeout(importMetaCustom, 100, keystore);
}

View File

@ -4,10 +4,11 @@ const http = require('http');
const cic = require('cic-client-meta'); const cic = require('cic-client-meta');
const vcfp = require('vcard-parser'); const vcfp = require('vcard-parser');
const crdt = require('crdt-meta');
//const conf = JSON.parse(fs.readFileSync('./cic.conf')); //const conf = JSON.parse(fs.readFileSync('./cic.conf'));
const config = new cic.Config('./config'); const config = new crdt.Config('./config');
config.process(); config.process();
console.log(config); console.log(config);
@ -42,7 +43,7 @@ function sendit(uid, envelope) {
} }
function doOne(keystore, filePath, address) { function doOne(keystore, filePath, address) {
const signer = new cic.PGPSigner(keystore); const signer = new crdt.PGPSigner(keystore);
const j = JSON.parse(fs.readFileSync(filePath).toString()); const j = JSON.parse(fs.readFileSync(filePath).toString());
const b = Buffer.from(j['vcard'], 'base64'); const b = Buffer.from(j['vcard'], 'base64');
@ -51,9 +52,8 @@ function doOne(keystore, filePath, address) {
const phone = o.tel[0].value; const phone = o.tel[0].value;
cic.Phone.toKey(phone).then((uid) => { cic.Phone.toKey(phone).then((uid) => {
const o = fs.readFileSync(filePath, 'utf-8');
const s = new cic.Syncable(uid, o); const s = new crdt.Syncable(uid, address);
s.setSigner(signer); s.setSigner(signer);
s.onwrap = (env) => { s.onwrap = (env) => {
sendit(uid, env); sendit(uid, env);
@ -67,7 +67,7 @@ const publicKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_P
pk = fs.readFileSync(privateKeyPath); pk = fs.readFileSync(privateKeyPath);
pubk = fs.readFileSync(publicKeyPath); pubk = fs.readFileSync(publicKeyPath);
new cic.PGPKeyStore( new crdt.PGPKeyStore(
config.get('PGP_PASSPHRASE'), config.get('PGP_PASSPHRASE'),
pk, pk,
pubk, pubk,
@ -123,7 +123,7 @@ function importMetaPhone(keystore) {
if (batchCount == batchSize) { if (batchCount == batchSize) {
console.debug('reached batch size, breathing'); console.debug('reached batch size, breathing');
batchCount=0; batchCount=0;
setTimeout(importMeta, batchDelay, keystore); setTimeout(importMetaPhone, batchDelay, keystore);
return; return;
} }
} }

View File

@ -37,6 +37,7 @@ config_dir = os.environ.get('CONFINI_DIR', os.path.join(script_dir, 'config'))
argparser = argparse.ArgumentParser() argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='Config dir') argparser.add_argument('-c', type=str, default=config_dir, help='Config dir')
argparser.add_argument('--tag', type=str, action='append', help='Tags to add to record')
argparser.add_argument('--gift-threshold', type=int, help='If set, users will be funded with additional random balance (in token integer units)') argparser.add_argument('--gift-threshold', type=int, help='If set, users will be funded with additional random balance (in token integer units)')
argparser.add_argument('-v', action='store_true', help='Be verbose') argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', action='store_true', help='Be more verbose') argparser.add_argument('-vv', action='store_true', help='Be more verbose')
@ -81,6 +82,10 @@ phone_idx = []
user_dir = args.dir user_dir = args.dir
user_count = args.user_count user_count = args.user_count
tags = args.tag
if tags == None or len(tags) == 0:
tags = ['individual']
random.seed() random.seed()
def genPhoneIndex(phone): def genPhoneIndex(phone):
@ -189,6 +194,7 @@ if __name__ == '__main__':
os.makedirs(base_dir, exist_ok=True) os.makedirs(base_dir, exist_ok=True)
fa = open(os.path.join(user_dir, 'balances.csv'), 'w') fa = open(os.path.join(user_dir, 'balances.csv'), 'w')
ft = open(os.path.join(user_dir, 'tags.csv'), 'w')
i = 0 i = 0
while i < user_count: while i < user_count:
@ -215,10 +221,12 @@ if __name__ == '__main__':
f.write(eth) f.write(eth)
f.close() f.close()
ft.write('{}:{}\n'.format(eth, ','.join(tags)))
amount = genAmount() amount = genAmount()
fa.write('{},{}\n'.format(eth,amount)) fa.write('{},{}\n'.format(eth,amount))
logg.debug('pidx {}, uid {}, eth {}, amount {}, phone {}'.format(pidx, uid, eth, amount, phone)) logg.debug('pidx {}, uid {}, eth {}, amount {}, phone {}'.format(pidx, uid, eth, amount, phone))
i += 1 i += 1
ft.close()
fa.close() fa.close()

View File

@ -24,7 +24,7 @@ from chainlib.eth.gas import RPCGasOracle
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import RPCNonceOracle
from cic_types.processor import generate_metadata_pointer from cic_types.processor import generate_metadata_pointer
from eth_accounts_index import AccountRegistry from eth_accounts_index import AccountRegistry
from contract_registry import Registry from eth_contract_registry import Registry
from crypto_dev_signer.keystore.dict import DictKeystore from crypto_dev_signer.keystore.dict import DictKeystore
from crypto_dev_signer.eth.signer.defaultsigner import ReferenceSigner as EIP155Signer from crypto_dev_signer.eth.signer.defaultsigner import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.keyfile import to_dict as to_keyfile_dict from crypto_dev_signer.keystore.keyfile import to_dict as to_keyfile_dict
@ -38,6 +38,7 @@ argparser = argparse.ArgumentParser()
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)') argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing') argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
argparser.add_argument('-c', type=str, default=default_config_dir, help='config file') argparser.add_argument('-c', type=str, default=default_config_dir, help='config file')
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string') argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string')
argparser.add_argument('-r', '--registry', dest='r', type=str, help='Contract registry address') argparser.add_argument('-r', '--registry', dest='r', type=str, help='Contract registry address')
argparser.add_argument('--batch-size', dest='batch_size', default=50, type=int, help='burst size of sending transactions to node') argparser.add_argument('--batch-size', dest='batch_size', default=50, type=int, help='burst size of sending transactions to node')
@ -68,6 +69,11 @@ os.makedirs(user_new_dir)
meta_dir = os.path.join(args.user_dir, 'meta') meta_dir = os.path.join(args.user_dir, 'meta')
os.makedirs(meta_dir) os.makedirs(meta_dir)
custom_dir = os.path.join(args.user_dir, 'custom')
os.makedirs(custom_dir)
os.makedirs(os.path.join(custom_dir, 'new'))
os.makedirs(os.path.join(custom_dir, 'meta'))
phone_dir = os.path.join(args.user_dir, 'phone') phone_dir = os.path.join(args.user_dir, 'phone')
os.makedirs(os.path.join(phone_dir, 'meta')) os.makedirs(os.path.join(phone_dir, 'meta'))
@ -77,9 +83,14 @@ os.stat(user_old_dir)
txs_dir = os.path.join(args.user_dir, 'txs') txs_dir = os.path.join(args.user_dir, 'txs')
os.makedirs(txs_dir) os.makedirs(txs_dir)
user_dir = args.user_dir
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
chain_str = str(chain_spec) chain_str = str(chain_spec)
old_chain_spec = ChainSpec.from_chain_str(args.old_chain_spec)
old_chain_str = str(old_chain_spec)
batch_size = args.batch_size batch_size = args.batch_size
batch_delay = args.batch_delay batch_delay = args.batch_delay
@ -127,12 +138,18 @@ def register_eth(i, u):
return address return address
def register_ussd(u):
pass
if __name__ == '__main__': if __name__ == '__main__':
user_tags = {}
f = open(os.path.join(user_dir, 'tags.csv'), 'r')
while True:
r = f.readline().rstrip()
if len(r) == 0:
break
(old_address, tags_csv) = r.split(':')
old_address = strip_0x(old_address)
user_tags[old_address] = tags_csv.split(',')
logg.debug('read tags {} for old address {}'.format(user_tags[old_address], old_address))
i = 0 i = 0
j = 0 j = 0
@ -150,6 +167,7 @@ if __name__ == '__main__':
continue continue
f.close() f.close()
u = Person.deserialize(o) u = Person.deserialize(o)
logg.debug('u {}'.format(o))
new_address = register_eth(i, u) new_address = register_eth(i, u)
if u.identities.get('evm') == None: if u.identities.get('evm') == None:
@ -157,8 +175,6 @@ if __name__ == '__main__':
sub_chain_str = '{}:{}'.format(chain_spec.common_name(), chain_spec.network_id()) sub_chain_str = '{}:{}'.format(chain_spec.common_name(), chain_spec.network_id())
u.identities['evm'][sub_chain_str] = [new_address] u.identities['evm'][sub_chain_str] = [new_address]
register_ussd(u)
new_address_clean = strip_0x(new_address) new_address_clean = strip_0x(new_address)
filepath = os.path.join( filepath = os.path.join(
user_new_dir, user_new_dir,
@ -198,6 +214,29 @@ if __name__ == '__main__':
os.symlink(os.path.realpath(filepath), meta_phone_filepath) os.symlink(os.path.realpath(filepath), meta_phone_filepath)
# custom data
custom_key = generate_metadata_pointer(phone.encode('utf-8'), ':cic.custom')
custom_filepath = os.path.join(custom_dir, 'meta', custom_key)
filepath = os.path.join(
custom_dir,
'new',
custom_key[:2].upper(),
custom_key[2:4].upper(),
custom_key.upper() + '.json',
)
os.makedirs(os.path.dirname(filepath), exist_ok=True)
sub_old_chain_str = '{}:{}'.format(old_chain_spec.common_name(), old_chain_spec.network_id())
f = open(filepath, 'w')
k = u.identities['evm'][sub_old_chain_str][0]
tag_data = {'tags': user_tags[strip_0x(k)]}
f.write(json.dumps(tag_data))
f.close()
os.symlink(os.path.realpath(filepath), custom_filepath)
i += 1 i += 1
sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r") sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r")

View File

@ -5,39 +5,40 @@
"packages": { "packages": {
"": { "": {
"dependencies": { "dependencies": {
"cic-client-meta": "^0.0.7-alpha.5", "cic-client-meta": "0.0.7-alpha.6",
"vcard-parser": "^1.0.0" "vcard-parser": "^1.0.0"
} }
}, },
"node_modules/@ethereumjs/common": { "node_modules/@ethereumjs/common": {
"version": "2.0.0", "version": "2.2.0",
"resolved": "https://registry.npmjs.org/@ethereumjs/common/-/common-2.0.0.tgz", "resolved": "https://registry.npmjs.org/@ethereumjs/common/-/common-2.2.0.tgz",
"integrity": "sha512-yL0zA7Xwgz8IFHKW0VoXGjdZDVxUJg8BQ/muMHvYPW7zHJNNC80gQmvLH+MpvIg1TCXZkFXxrpYRAyCElSm+aw==", "integrity": "sha512-PyQiTG00MJtBRkJmv46ChZL8u2XWxNBeAthznAUIUiefxPAXjbkuiCZOuncgJS34/XkMbNc9zMt/PlgKRBElig==",
"dependencies": { "dependencies": {
"crc-32": "^1.2.0" "crc-32": "^1.2.0",
"ethereumjs-util": "^7.0.9"
} }
}, },
"node_modules/@ethereumjs/tx": { "node_modules/@ethereumjs/tx": {
"version": "3.0.2", "version": "3.1.3",
"resolved": "https://registry.npmjs.org/@ethereumjs/tx/-/tx-3.0.2.tgz", "resolved": "https://registry.npmjs.org/@ethereumjs/tx/-/tx-3.1.3.tgz",
"integrity": "sha512-zmFCosjOdj1WoYEiQBdC4sCOAllBEwxdKuY85L9FgZ4zVDfZUVsQ4S9paczt4hVt65A7N8sJwgVEzDaQmrRaqw==", "integrity": "sha512-DJBu6cbwYtiPTFeCUR8DF5p+PF0jxs+0rALJZiEcTz2tiRPIEkM72GEbrkGuqzENLCzBrJHT43O0DxSYTqeo+g==",
"dependencies": { "dependencies": {
"@ethereumjs/common": "^2.0.0", "@ethereumjs/common": "^2.2.0",
"ethereumjs-util": "^7.0.8" "ethereumjs-util": "^7.0.10"
} }
}, },
"node_modules/@types/bn.js": { "node_modules/@types/bn.js": {
"version": "4.11.6", "version": "5.1.0",
"resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-4.11.6.tgz", "resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-5.1.0.tgz",
"integrity": "sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg==", "integrity": "sha512-QSSVYj7pYFN49kW77o2s9xTCwZ8F2xLbjLLSEVh8D2F4JUhZtPAGOFLTD+ffqksBx/u4cE/KImFjyhqCjn/LIA==",
"dependencies": { "dependencies": {
"@types/node": "*" "@types/node": "*"
} }
}, },
"node_modules/@types/node": { "node_modules/@types/node": {
"version": "14.14.30", "version": "14.14.41",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.30.tgz", "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.41.tgz",
"integrity": "sha512-gUWhy8s45fQp4PqqKecsnOkdW0kt1IaKjgOIR3HPokkzTmQj9ji2wWFID5THu1MKrtO+d4s2lVrlEhXUsPXSvg==" "integrity": "sha512-dueRKfaJL4RTtSa7bWeTK1M+VH+Gns73oCgzvYfHZywRCoPSd8EkXBL0mZ9unPTveBn+D9phZBaxuzpwjWkW0g=="
}, },
"node_modules/@types/pbkdf2": { "node_modules/@types/pbkdf2": {
"version": "3.1.0", "version": "3.1.0",
@ -48,9 +49,9 @@
} }
}, },
"node_modules/@types/secp256k1": { "node_modules/@types/secp256k1": {
"version": "4.0.1", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/secp256k1/-/secp256k1-4.0.1.tgz", "resolved": "https://registry.npmjs.org/@types/secp256k1/-/secp256k1-4.0.2.tgz",
"integrity": "sha512-+ZjSA8ELlOp8SlKi0YLB2tz9d5iPNEmOBd+8Rz21wTMdaXQIa9b6TEnD6l5qKOCypE7FSyPyck12qZJxSDNoog==", "integrity": "sha512-QMg+9v0bbNJ2peLuHRWxzmy0HRJIG6gFZNhaRSp7S3ggSbCCxiqQB2/ybvhXyhHOCequpNkrx7OavNhrWOsW0A==",
"dependencies": { "dependencies": {
"@types/node": "*" "@types/node": "*"
} }
@ -158,9 +159,9 @@
} }
}, },
"node_modules/asn1.js/node_modules/bn.js": { "node_modules/asn1.js/node_modules/bn.js": {
"version": "4.11.9", "version": "4.12.0",
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
"integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
}, },
"node_modules/assert-plus": { "node_modules/assert-plus": {
"version": "1.0.0", "version": "1.0.0",
@ -204,9 +205,9 @@
"optional": true "optional": true
}, },
"node_modules/balanced-match": { "node_modules/balanced-match": {
"version": "1.0.0", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
}, },
"node_modules/base-x": { "node_modules/base-x": {
"version": "3.0.8", "version": "3.0.8",
@ -243,9 +244,9 @@
} }
}, },
"node_modules/bn.js": { "node_modules/bn.js": {
"version": "5.1.3", "version": "5.2.0",
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.1.3.tgz", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz",
"integrity": "sha512-GkTiFpjFtUzU9CbMeJ5iazkCzGL3jrhzerzZIuqLABjbwRaFt33I9tUdSNryIptM+RxDet6OKm2WnLXzW51KsQ==" "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw=="
}, },
"node_modules/brace-expansion": { "node_modules/brace-expansion": {
"version": "1.1.11", "version": "1.1.11",
@ -317,9 +318,9 @@
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
}, },
"node_modules/cic-client-meta": { "node_modules/cic-client-meta": {
"version": "0.0.7-alpha.5", "version": "0.0.7-alpha.6",
"resolved": "https://registry.npmjs.org/cic-client-meta/-/cic-client-meta-0.0.7-alpha.5.tgz", "resolved": "https://registry.npmjs.org/cic-client-meta/-/cic-client-meta-0.0.7-alpha.6.tgz",
"integrity": "sha512-h+0wmAKZIgezppBNYDmG387w6tI91FSWqONMTZbMuaO1Ej76Gg0Mk2UcDyAF/dmY6doXz3kHAbWkWat7mTzXAQ==", "integrity": "sha512-oIN1aHkPHfsxJKDV6k4f1kX2tcppw3Q+D1b4BoPh0hYjNKNb7gImBMWnGsy8uiD9W6SNYE4sIXyrtct8mvrhsw==",
"dependencies": { "dependencies": {
"@ethereumjs/tx": "^3.0.0-beta.1", "@ethereumjs/tx": "^3.0.0-beta.1",
"automerge": "^0.14.1", "automerge": "^0.14.1",
@ -331,7 +332,7 @@
"yargs": "^16.1.0" "yargs": "^16.1.0"
}, },
"engines": { "engines": {
"node": "~15.3.0" "node": "~14.16.1"
} }
}, },
"node_modules/cipher-base": { "node_modules/cipher-base": {
@ -370,9 +371,9 @@
} }
}, },
"node_modules/cliui/node_modules/string-width": { "node_modules/cliui/node_modules/string-width": {
"version": "4.2.0", "version": "4.2.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
"integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==",
"dependencies": { "dependencies": {
"emoji-regex": "^8.0.0", "emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0", "is-fullwidth-code-point": "^3.0.0",
@ -562,9 +563,9 @@
} }
}, },
"node_modules/elliptic/node_modules/bn.js": { "node_modules/elliptic/node_modules/bn.js": {
"version": "4.11.9", "version": "4.12.0",
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
"integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
}, },
"node_modules/emoji-regex": { "node_modules/emoji-regex": {
"version": "8.0.0", "version": "8.0.0",
@ -602,11 +603,11 @@
} }
}, },
"node_modules/ethereumjs-util": { "node_modules/ethereumjs-util": {
"version": "7.0.8", "version": "7.0.10",
"resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.0.8.tgz", "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.0.10.tgz",
"integrity": "sha512-JJt7tDpCAmDPw/sGoFYeq0guOVqT3pTE9xlEbBmc/nlCij3JRCoS2c96SQ6kXVHOT3xWUNLDm5QCJLQaUnVAtQ==", "integrity": "sha512-c/xThw6A+EAnej5Xk5kOzFzyoSnw0WX0tSlZ6pAsfGVvQj3TItaDg9b1+Fz1RJXA+y2YksKwQnuzgt1eY6LKzw==",
"dependencies": { "dependencies": {
"@types/bn.js": "^4.11.3", "@types/bn.js": "^5.1.0",
"bn.js": "^5.1.2", "bn.js": "^5.1.2",
"create-hash": "^1.1.2", "create-hash": "^1.1.2",
"ethereum-cryptography": "^0.1.3", "ethereum-cryptography": "^0.1.3",
@ -1026,21 +1027,21 @@
} }
}, },
"node_modules/mime-db": { "node_modules/mime-db": {
"version": "1.46.0", "version": "1.47.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.46.0.tgz", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.47.0.tgz",
"integrity": "sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ==", "integrity": "sha512-QBmA/G2y+IfeS4oktet3qRZ+P5kPhCKRXxXnQEudYqUaEioAU1/Lq2us3D/t1Jfo4hE9REQPrbB7K5sOczJVIw==",
"optional": true, "optional": true,
"engines": { "engines": {
"node": ">= 0.6" "node": ">= 0.6"
} }
}, },
"node_modules/mime-types": { "node_modules/mime-types": {
"version": "2.1.29", "version": "2.1.30",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.29.tgz", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.30.tgz",
"integrity": "sha512-Y/jMt/S5sR9OaqteJtslsFZKWOIIqMACsJSiHghlCAyhf7jfVYjKBmLiX8OgpWeW+fjJ2b+Az69aPFPkUOY6xQ==", "integrity": "sha512-crmjA4bLtR8m9qLpHvgxSChT+XoSlZi8J4n/aIdn3z92e/U47Z0V/yl+Wh9W046GgFVAmoNR/fmdbZYcSSIUeg==",
"optional": true, "optional": true,
"dependencies": { "dependencies": {
"mime-db": "1.46.0" "mime-db": "1.47.0"
}, },
"engines": { "engines": {
"node": ">= 0.6" "node": ">= 0.6"
@ -1361,9 +1362,9 @@
} }
}, },
"node_modules/pbkdf2": { "node_modules/pbkdf2": {
"version": "3.1.1", "version": "3.1.2",
"resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.1.tgz", "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz",
"integrity": "sha512-4Ejy1OPxi9f2tt1rRV7Go7zmfDQ+ZectEQz3VGUQhgq62HtIRPDyG/JtnwIxs6x3uNMwo2V7q1fMvKjb+Tnpqg==", "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==",
"dependencies": { "dependencies": {
"create-hash": "^1.1.2", "create-hash": "^1.1.2",
"create-hmac": "^1.1.4", "create-hmac": "^1.1.4",
@ -1382,15 +1383,15 @@
"optional": true "optional": true
}, },
"node_modules/pg": { "node_modules/pg": {
"version": "8.5.1", "version": "8.6.0",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.5.1.tgz", "resolved": "https://registry.npmjs.org/pg/-/pg-8.6.0.tgz",
"integrity": "sha512-9wm3yX9lCfjvA98ybCyw2pADUivyNWT/yIP4ZcDVpMN0og70BUWYEGXPCTAQdGTAqnytfRADb7NERrY1qxhIqw==", "integrity": "sha512-qNS9u61lqljTDFvmk/N66EeGq3n6Ujzj0FFyNMGQr6XuEv4tgNTXvJQTfJdcvGit5p5/DWPu+wj920hAJFI+QQ==",
"dependencies": { "dependencies": {
"buffer-writer": "2.0.0", "buffer-writer": "2.0.0",
"packet-reader": "1.0.0", "packet-reader": "1.0.0",
"pg-connection-string": "^2.4.0", "pg-connection-string": "^2.5.0",
"pg-pool": "^3.2.2", "pg-pool": "^3.3.0",
"pg-protocol": "^1.4.0", "pg-protocol": "^1.5.0",
"pg-types": "^2.1.0", "pg-types": "^2.1.0",
"pgpass": "1.x" "pgpass": "1.x"
}, },
@ -1399,9 +1400,9 @@
} }
}, },
"node_modules/pg-connection-string": { "node_modules/pg-connection-string": {
"version": "2.4.0", "version": "2.5.0",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.4.0.tgz", "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz",
"integrity": "sha512-3iBXuv7XKvxeMrIgym7njT+HlZkwZqqGX4Bu9cci8xHZNT+Um1gWKqCsAzcC0d95rcKMU5WBg6YRUcHyV0HZKQ==" "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ=="
}, },
"node_modules/pg-int8": { "node_modules/pg-int8": {
"version": "1.0.1", "version": "1.0.1",
@ -1412,14 +1413,14 @@
} }
}, },
"node_modules/pg-pool": { "node_modules/pg-pool": {
"version": "3.2.2", "version": "3.3.0",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.2.2.tgz", "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.3.0.tgz",
"integrity": "sha512-ORJoFxAlmmros8igi608iVEbQNNZlp89diFVx6yV5v+ehmpMY9sK6QgpmgoXbmkNaBAx8cOOZh9g80kJv1ooyA==" "integrity": "sha512-0O5huCql8/D6PIRFAlmccjphLYWC+JIzvUhSzXSpGaf+tjTZc4nn+Lr7mLXBbFJfvwbP0ywDv73EiaBsxn7zdg=="
}, },
"node_modules/pg-protocol": { "node_modules/pg-protocol": {
"version": "1.4.0", "version": "1.5.0",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.4.0.tgz", "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz",
"integrity": "sha512-El+aXWcwG/8wuFICMQjM5ZSAm6OWiJicFdNYo+VY3QP+8vI4SvLIWVe51PppTzMhikUJR+PsyIFKqfdXPz/yxA==" "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ=="
}, },
"node_modules/pg-types": { "node_modules/pg-types": {
"version": "2.2.0", "version": "2.2.0",
@ -1625,9 +1626,9 @@
} }
}, },
"node_modules/rlp/node_modules/bn.js": { "node_modules/rlp/node_modules/bn.js": {
"version": "4.11.9", "version": "4.12.0",
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
"integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
}, },
"node_modules/safe-buffer": { "node_modules/safe-buffer": {
"version": "5.2.1", "version": "5.2.1",
@ -1958,9 +1959,9 @@
} }
}, },
"node_modules/wrap-ansi/node_modules/string-width": { "node_modules/wrap-ansi/node_modules/string-width": {
"version": "4.2.0", "version": "4.2.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
"integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==",
"dependencies": { "dependencies": {
"emoji-regex": "^8.0.0", "emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0", "is-fullwidth-code-point": "^3.0.0",
@ -2005,9 +2006,9 @@
} }
}, },
"node_modules/y18n": { "node_modules/y18n": {
"version": "5.0.5", "version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
"integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg==", "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
"engines": { "engines": {
"node": ">=10" "node": ">=10"
} }
@ -2035,9 +2036,9 @@
} }
}, },
"node_modules/yargs-parser": { "node_modules/yargs-parser": {
"version": "20.2.5", "version": "20.2.7",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.5.tgz", "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.7.tgz",
"integrity": "sha512-jYRGS3zWy20NtDtK2kBgo/TlAoy5YUuhD9/LZ7z7W4j1Fdw2cqD0xEEclf8fxc8xjD6X5Qr+qQQwCEsP8iRiYg==", "integrity": "sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw==",
"engines": { "engines": {
"node": ">=10" "node": ">=10"
} }
@ -2059,9 +2060,9 @@
} }
}, },
"node_modules/yargs/node_modules/string-width": { "node_modules/yargs/node_modules/string-width": {
"version": "4.2.0", "version": "4.2.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
"integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==",
"dependencies": { "dependencies": {
"emoji-regex": "^8.0.0", "emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0", "is-fullwidth-code-point": "^3.0.0",
@ -2085,34 +2086,35 @@
}, },
"dependencies": { "dependencies": {
"@ethereumjs/common": { "@ethereumjs/common": {
"version": "2.0.0", "version": "2.2.0",
"resolved": "https://registry.npmjs.org/@ethereumjs/common/-/common-2.0.0.tgz", "resolved": "https://registry.npmjs.org/@ethereumjs/common/-/common-2.2.0.tgz",
"integrity": "sha512-yL0zA7Xwgz8IFHKW0VoXGjdZDVxUJg8BQ/muMHvYPW7zHJNNC80gQmvLH+MpvIg1TCXZkFXxrpYRAyCElSm+aw==", "integrity": "sha512-PyQiTG00MJtBRkJmv46ChZL8u2XWxNBeAthznAUIUiefxPAXjbkuiCZOuncgJS34/XkMbNc9zMt/PlgKRBElig==",
"requires": { "requires": {
"crc-32": "^1.2.0" "crc-32": "^1.2.0",
"ethereumjs-util": "^7.0.9"
} }
}, },
"@ethereumjs/tx": { "@ethereumjs/tx": {
"version": "3.0.2", "version": "3.1.3",
"resolved": "https://registry.npmjs.org/@ethereumjs/tx/-/tx-3.0.2.tgz", "resolved": "https://registry.npmjs.org/@ethereumjs/tx/-/tx-3.1.3.tgz",
"integrity": "sha512-zmFCosjOdj1WoYEiQBdC4sCOAllBEwxdKuY85L9FgZ4zVDfZUVsQ4S9paczt4hVt65A7N8sJwgVEzDaQmrRaqw==", "integrity": "sha512-DJBu6cbwYtiPTFeCUR8DF5p+PF0jxs+0rALJZiEcTz2tiRPIEkM72GEbrkGuqzENLCzBrJHT43O0DxSYTqeo+g==",
"requires": { "requires": {
"@ethereumjs/common": "^2.0.0", "@ethereumjs/common": "^2.2.0",
"ethereumjs-util": "^7.0.8" "ethereumjs-util": "^7.0.10"
} }
}, },
"@types/bn.js": { "@types/bn.js": {
"version": "4.11.6", "version": "5.1.0",
"resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-4.11.6.tgz", "resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-5.1.0.tgz",
"integrity": "sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg==", "integrity": "sha512-QSSVYj7pYFN49kW77o2s9xTCwZ8F2xLbjLLSEVh8D2F4JUhZtPAGOFLTD+ffqksBx/u4cE/KImFjyhqCjn/LIA==",
"requires": { "requires": {
"@types/node": "*" "@types/node": "*"
} }
}, },
"@types/node": { "@types/node": {
"version": "14.14.30", "version": "14.14.39",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.30.tgz", "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.39.tgz",
"integrity": "sha512-gUWhy8s45fQp4PqqKecsnOkdW0kt1IaKjgOIR3HPokkzTmQj9ji2wWFID5THu1MKrtO+d4s2lVrlEhXUsPXSvg==" "integrity": "sha512-Qipn7rfTxGEDqZiezH+wxqWYR8vcXq5LRpZrETD19Gs4o8LbklbmqotSUsMU+s5G3PJwMRDfNEYoxrcBwIxOuw=="
}, },
"@types/pbkdf2": { "@types/pbkdf2": {
"version": "3.1.0", "version": "3.1.0",
@ -2123,9 +2125,9 @@
} }
}, },
"@types/secp256k1": { "@types/secp256k1": {
"version": "4.0.1", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/secp256k1/-/secp256k1-4.0.1.tgz", "resolved": "https://registry.npmjs.org/@types/secp256k1/-/secp256k1-4.0.2.tgz",
"integrity": "sha512-+ZjSA8ELlOp8SlKi0YLB2tz9d5iPNEmOBd+8Rz21wTMdaXQIa9b6TEnD6l5qKOCypE7FSyPyck12qZJxSDNoog==", "integrity": "sha512-QMg+9v0bbNJ2peLuHRWxzmy0HRJIG6gFZNhaRSp7S3ggSbCCxiqQB2/ybvhXyhHOCequpNkrx7OavNhrWOsW0A==",
"requires": { "requires": {
"@types/node": "*" "@types/node": "*"
} }
@ -2229,9 +2231,9 @@
}, },
"dependencies": { "dependencies": {
"bn.js": { "bn.js": {
"version": "4.11.9", "version": "4.12.0",
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
"integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
} }
} }
}, },
@ -2271,9 +2273,9 @@
"optional": true "optional": true
}, },
"balanced-match": { "balanced-match": {
"version": "1.0.0", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
}, },
"base-x": { "base-x": {
"version": "3.0.8", "version": "3.0.8",
@ -2307,9 +2309,9 @@
} }
}, },
"bn.js": { "bn.js": {
"version": "5.1.3", "version": "5.2.0",
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.1.3.tgz", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.0.tgz",
"integrity": "sha512-GkTiFpjFtUzU9CbMeJ5iazkCzGL3jrhzerzZIuqLABjbwRaFt33I9tUdSNryIptM+RxDet6OKm2WnLXzW51KsQ==" "integrity": "sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw=="
}, },
"brace-expansion": { "brace-expansion": {
"version": "1.1.11", "version": "1.1.11",
@ -2378,14 +2380,15 @@
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
}, },
"cic-client-meta": { "cic-client-meta": {
"version": "0.0.7-alpha.5", "version": "0.0.7-alpha.8",
"resolved": "https://registry.npmjs.org/cic-client-meta/-/cic-client-meta-0.0.7-alpha.5.tgz", "resolved": "https://registry.npmjs.org/cic-client-meta/-/cic-client-meta-0.0.7-alpha.8.tgz",
"integrity": "sha512-h+0wmAKZIgezppBNYDmG387w6tI91FSWqONMTZbMuaO1Ej76Gg0Mk2UcDyAF/dmY6doXz3kHAbWkWat7mTzXAQ==", "integrity": "sha512-NtU4b4dptG2gsKXIvAv1xCxxxhrr801tb8+Co1O+VLx+wvxFyPRxqa2f2eN5nrSnFnljNsWWpE6K5bJZb1+Rqw==",
"requires": { "requires": {
"@ethereumjs/tx": "^3.0.0-beta.1", "@ethereumjs/tx": "^3.0.0-beta.1",
"automerge": "^0.14.1", "automerge": "^0.14.1",
"crdt-meta": "0.0.8",
"ethereumjs-wallet": "^1.0.1", "ethereumjs-wallet": "^1.0.1",
"ini": "^1.3.5", "ini": "^1.3.8",
"openpgp": "^4.10.8", "openpgp": "^4.10.8",
"pg": "^8.4.2", "pg": "^8.4.2",
"sqlite3": "^5.0.0", "sqlite3": "^5.0.0",
@ -2422,9 +2425,9 @@
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
}, },
"string-width": { "string-width": {
"version": "4.2.0", "version": "4.2.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
"integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==",
"requires": { "requires": {
"emoji-regex": "^8.0.0", "emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0", "is-fullwidth-code-point": "^3.0.0",
@ -2492,6 +2495,18 @@
"printj": "~1.1.0" "printj": "~1.1.0"
} }
}, },
"crdt-meta": {
"version": "0.0.8",
"resolved": "https://registry.npmjs.org/crdt-meta/-/crdt-meta-0.0.8.tgz",
"integrity": "sha512-CS0sS0L2QWthz7vmu6vzl3p4kcpJ+IKILBJ4tbgN4A3iNG8wnBeuDIv/z3KFFQjcfuP4QAh6E9LywKUTxtDc3g==",
"requires": {
"automerge": "^0.14.2",
"ini": "^1.3.8",
"openpgp": "^4.10.8",
"pg": "^8.5.1",
"sqlite3": "^5.0.2"
}
},
"create-hash": { "create-hash": {
"version": "1.2.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz",
@ -2580,9 +2595,9 @@
}, },
"dependencies": { "dependencies": {
"bn.js": { "bn.js": {
"version": "4.11.9", "version": "4.12.0",
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
"integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
} }
} }
}, },
@ -2619,11 +2634,11 @@
} }
}, },
"ethereumjs-util": { "ethereumjs-util": {
"version": "7.0.8", "version": "7.0.10",
"resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.0.8.tgz", "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.0.10.tgz",
"integrity": "sha512-JJt7tDpCAmDPw/sGoFYeq0guOVqT3pTE9xlEbBmc/nlCij3JRCoS2c96SQ6kXVHOT3xWUNLDm5QCJLQaUnVAtQ==", "integrity": "sha512-c/xThw6A+EAnej5Xk5kOzFzyoSnw0WX0tSlZ6pAsfGVvQj3TItaDg9b1+Fz1RJXA+y2YksKwQnuzgt1eY6LKzw==",
"requires": { "requires": {
"@types/bn.js": "^4.11.3", "@types/bn.js": "^5.1.0",
"bn.js": "^5.1.2", "bn.js": "^5.1.2",
"create-hash": "^1.1.2", "create-hash": "^1.1.2",
"ethereum-cryptography": "^0.1.3", "ethereum-cryptography": "^0.1.3",
@ -2980,18 +2995,18 @@
} }
}, },
"mime-db": { "mime-db": {
"version": "1.46.0", "version": "1.47.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.46.0.tgz", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.47.0.tgz",
"integrity": "sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ==", "integrity": "sha512-QBmA/G2y+IfeS4oktet3qRZ+P5kPhCKRXxXnQEudYqUaEioAU1/Lq2us3D/t1Jfo4hE9REQPrbB7K5sOczJVIw==",
"optional": true "optional": true
}, },
"mime-types": { "mime-types": {
"version": "2.1.29", "version": "2.1.30",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.29.tgz", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.30.tgz",
"integrity": "sha512-Y/jMt/S5sR9OaqteJtslsFZKWOIIqMACsJSiHghlCAyhf7jfVYjKBmLiX8OgpWeW+fjJ2b+Az69aPFPkUOY6xQ==", "integrity": "sha512-crmjA4bLtR8m9qLpHvgxSChT+XoSlZi8J4n/aIdn3z92e/U47Z0V/yl+Wh9W046GgFVAmoNR/fmdbZYcSSIUeg==",
"optional": true, "optional": true,
"requires": { "requires": {
"mime-db": "1.46.0" "mime-db": "1.47.0"
} }
}, },
"minimalistic-assert": { "minimalistic-assert": {
@ -3249,9 +3264,9 @@
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
}, },
"pbkdf2": { "pbkdf2": {
"version": "3.1.1", "version": "3.1.2",
"resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.1.tgz", "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz",
"integrity": "sha512-4Ejy1OPxi9f2tt1rRV7Go7zmfDQ+ZectEQz3VGUQhgq62HtIRPDyG/JtnwIxs6x3uNMwo2V7q1fMvKjb+Tnpqg==", "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==",
"requires": { "requires": {
"create-hash": "^1.1.2", "create-hash": "^1.1.2",
"create-hmac": "^1.1.4", "create-hmac": "^1.1.4",
@ -3267,23 +3282,23 @@
"optional": true "optional": true
}, },
"pg": { "pg": {
"version": "8.5.1", "version": "8.6.0",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.5.1.tgz", "resolved": "https://registry.npmjs.org/pg/-/pg-8.6.0.tgz",
"integrity": "sha512-9wm3yX9lCfjvA98ybCyw2pADUivyNWT/yIP4ZcDVpMN0og70BUWYEGXPCTAQdGTAqnytfRADb7NERrY1qxhIqw==", "integrity": "sha512-qNS9u61lqljTDFvmk/N66EeGq3n6Ujzj0FFyNMGQr6XuEv4tgNTXvJQTfJdcvGit5p5/DWPu+wj920hAJFI+QQ==",
"requires": { "requires": {
"buffer-writer": "2.0.0", "buffer-writer": "2.0.0",
"packet-reader": "1.0.0", "packet-reader": "1.0.0",
"pg-connection-string": "^2.4.0", "pg-connection-string": "^2.5.0",
"pg-pool": "^3.2.2", "pg-pool": "^3.3.0",
"pg-protocol": "^1.4.0", "pg-protocol": "^1.5.0",
"pg-types": "^2.1.0", "pg-types": "^2.1.0",
"pgpass": "1.x" "pgpass": "1.x"
} }
}, },
"pg-connection-string": { "pg-connection-string": {
"version": "2.4.0", "version": "2.5.0",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.4.0.tgz", "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz",
"integrity": "sha512-3iBXuv7XKvxeMrIgym7njT+HlZkwZqqGX4Bu9cci8xHZNT+Um1gWKqCsAzcC0d95rcKMU5WBg6YRUcHyV0HZKQ==" "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ=="
}, },
"pg-int8": { "pg-int8": {
"version": "1.0.1", "version": "1.0.1",
@ -3291,14 +3306,14 @@
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="
}, },
"pg-pool": { "pg-pool": {
"version": "3.2.2", "version": "3.3.0",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.2.2.tgz", "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.3.0.tgz",
"integrity": "sha512-ORJoFxAlmmros8igi608iVEbQNNZlp89diFVx6yV5v+ehmpMY9sK6QgpmgoXbmkNaBAx8cOOZh9g80kJv1ooyA==" "integrity": "sha512-0O5huCql8/D6PIRFAlmccjphLYWC+JIzvUhSzXSpGaf+tjTZc4nn+Lr7mLXBbFJfvwbP0ywDv73EiaBsxn7zdg=="
}, },
"pg-protocol": { "pg-protocol": {
"version": "1.4.0", "version": "1.5.0",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.4.0.tgz", "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz",
"integrity": "sha512-El+aXWcwG/8wuFICMQjM5ZSAm6OWiJicFdNYo+VY3QP+8vI4SvLIWVe51PppTzMhikUJR+PsyIFKqfdXPz/yxA==" "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ=="
}, },
"pg-types": { "pg-types": {
"version": "2.2.0", "version": "2.2.0",
@ -3459,9 +3474,9 @@
}, },
"dependencies": { "dependencies": {
"bn.js": { "bn.js": {
"version": "4.11.9", "version": "4.12.0",
"resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
"integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
} }
} }
}, },
@ -3734,9 +3749,9 @@
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
}, },
"string-width": { "string-width": {
"version": "4.2.0", "version": "4.2.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
"integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==",
"requires": { "requires": {
"emoji-regex": "^8.0.0", "emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0", "is-fullwidth-code-point": "^3.0.0",
@ -3774,9 +3789,9 @@
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="
}, },
"y18n": { "y18n": {
"version": "5.0.5", "version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
"integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg==" "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="
}, },
"yallist": { "yallist": {
"version": "3.1.1", "version": "3.1.1",
@ -3808,9 +3823,9 @@
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
}, },
"string-width": { "string-width": {
"version": "4.2.0", "version": "4.2.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
"integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==",
"requires": { "requires": {
"emoji-regex": "^8.0.0", "emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0", "is-fullwidth-code-point": "^3.0.0",
@ -3828,9 +3843,9 @@
} }
}, },
"yargs-parser": { "yargs-parser": {
"version": "20.2.5", "version": "20.2.7",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.5.tgz", "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.7.tgz",
"integrity": "sha512-jYRGS3zWy20NtDtK2kBgo/TlAoy5YUuhD9/LZ7z7W4j1Fdw2cqD0xEEclf8fxc8xjD6X5Qr+qQQwCEsP8iRiYg==" "integrity": "sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw=="
} }
} }
} }

View File

@ -1,6 +1,7 @@
{ {
"dependencies": { "dependencies": {
"cic-client-meta": "^0.0.7-alpha.6", "cic-client-meta": "^0.0.7-alpha.8",
"crdt-meta": "0.0.8",
"vcard-parser": "^1.0.0" "vcard-parser": "^1.0.0"
} }
} }

View File

@ -1,5 +1,5 @@
cic-base[full_graph]==0.1.2a77 cic-base[full_graph]==0.1.2b2
sarafu-faucet==0.0.2a28 sarafu-faucet==0.0.2a28
cic-eth==0.11.0b6 cic-eth==0.11.0b10
cic-types==0.1.0a10 cic-types==0.1.0a10
crypto-dev-signer==0.4.14b2 crypto-dev-signer==0.4.14b3

View File

@ -337,12 +337,11 @@ class Verifier:
address_recovered = address_recovered.replace('"', '') address_recovered = address_recovered.replace('"', '')
try: try:
address = strip_0x(address) upper_address_recovered = strip_0x(address_recovered).upper()
address_recovered = strip_0x(address_recovered)
except ValueError: except ValueError:
raise VerifierError(address_recovered, 'metadata (phone) address {} address recovered {}'.format(address, address_recovered)) raise VerifierError(address_recovered, 'metadata (phone) address {} address recovered {}'.format(address, address_recovered))
if address != address_recovered: if upper_address != upper_address_recovered:
raise VerifierError(address_recovered, 'metadata (phone)') raise VerifierError(address_recovered, 'metadata (phone)')

View File

@ -13,6 +13,11 @@ DEV_PIP_EXTRA_INDEX_URL=${DEV_PIP_EXTRA_INDEX_URL:-https://pip.grassrootseconomi
DEV_DATABASE_NAME_CIC_ETH=${DEV_DATABASE_NAME_CIC_ETH:-"cic-eth"} DEV_DATABASE_NAME_CIC_ETH=${DEV_DATABASE_NAME_CIC_ETH:-"cic-eth"}
CIC_DATA_DIR=${CIC_DATA_DIR:-/tmp/cic} CIC_DATA_DIR=${CIC_DATA_DIR:-/tmp/cic}
ETH_PASSPHRASE='' ETH_PASSPHRASE=''
CIC_DEFAULT_TOKEN_SYMBOL=${CIC_DEFAULT_TOKEN_SYMBOL:-GFT}
if [[ $CIC_DEFAULT_TOKEN_SYMBOL != 'GFT' && $CIC_DEFAULT_TOKEN_SYMBOL != 'SRF' ]]; then
>&2 echo CIC_DEFAULT_TOKEN_SYMBOL must be one of [GFT,SRF], but was $CIC_DEFAULT_TOKEN_SYMBOL
exit 1
fi
# Debug flag # Debug flag
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
@ -33,11 +38,11 @@ set -a
# get required addresses from registries # get required addresses from registries
DEV_TOKEN_INDEX_ADDRESS=`eth-contract-registry-list -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_REGISTRY_ADDRESS -f brief TokenRegistry` DEV_TOKEN_INDEX_ADDRESS=`eth-contract-registry-list -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_REGISTRY_ADDRESS -f brief TokenRegistry`
DEV_ACCOUNT_INDEX_ADDRESS=`eth-contract-registry-list -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_REGISTRY_ADDRESS -f brief AccountRegistry` DEV_ACCOUNT_INDEX_ADDRESS=`eth-contract-registry-list -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_REGISTRY_ADDRESS -f brief AccountRegistry`
DEV_RESERVE_ADDRESS=`eth-token-index-list -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_TOKEN_INDEX_ADDRESS -f brief SRF` DEV_RESERVE_ADDRESS=`eth-token-index-list -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_TOKEN_INDEX_ADDRESS -f brief $CIC_DEFAULT_TOKEN_SYMBOL`
cat <<EOF cat <<EOF
Token registry: $DEV_TOKEN_INDEX_ADDRESS Token registry: $DEV_TOKEN_INDEX_ADDRESS
Account registry: $DEV_ACCOUNT_INDEX_ADDRESS Account registry: $DEV_ACCOUNT_INDEX_ADDRESS
Reserve address: $DEV_RESERVE_ADDRESS Reserve address: $DEV_RESERVE_ADDRESS ($CIC_DEFAULT_TOKEN_SYMBOL)
EOF EOF
>&2 echo "create account for gas gifter" >&2 echo "create account for gas gifter"
@ -46,6 +51,7 @@ DEV_ETH_ACCOUNT_GAS_GIFTER=`cic-eth-create $debug --redis-host-callback=$REDIS_H
echo DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_GAS_GIFTER >> $env_out_file echo DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_GAS_GIFTER >> $env_out_file
cic-eth-tag -i $CIC_CHAIN_SPEC GAS_GIFTER $DEV_ETH_ACCOUNT_GAS_GIFTER cic-eth-tag -i $CIC_CHAIN_SPEC GAS_GIFTER $DEV_ETH_ACCOUNT_GAS_GIFTER
>&2 echo "create account for sarafu gifter" >&2 echo "create account for sarafu gifter"
DEV_ETH_ACCOUNT_SARAFU_GIFTER=`cic-eth-create $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register` DEV_ETH_ACCOUNT_SARAFU_GIFTER=`cic-eth-create $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_SARAFU_GIFTER=$DEV_ETH_ACCOUNT_SARAFU_GIFTER >> $env_out_file echo DEV_ETH_ACCOUNT_SARAFU_GIFTER=$DEV_ETH_ACCOUNT_SARAFU_GIFTER >> $env_out_file
@ -97,5 +103,10 @@ export DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS
#echo -n 0 > $init_level_file #echo -n 0 > $init_level_file
# Remove the SEND (8), QUEUE (16) and INIT (2) locks (or'ed), set by default at migration
cic-eth-ctl -i :: unlock INIT
cic-eth-ctl -i :: unlock SEND
cic-eth-ctl -i :: unlock QUEUE
set +a set +a
set +e set +e

View File

@ -0,0 +1 @@
include *health*.sh

View File

@ -0,0 +1,10 @@
docs:
mkdir -p doc/texinfo/html
makeinfo doc/texinfo/index.texi --html -o doc/texinfo/html/
markdown: docs
pandoc -f html -t markdown --standalone doc/texinfo/html/liveness.html -o README.md
.PHONY: docs markdown dist
dist:
python setup.py sdist

View File

@ -0,0 +1,105 @@
---
description: liveness (Untitled Document)
distribution: global
Generator: makeinfo
keywords: liveness (Untitled Document)
lang: en
resource-type: document
title: liveness (Untitled Document)
---
[]{#liveness}[]{#liveness-1}
## 1 liveness {#liveness .chapter}
[]{#liveness_005foverview}[]{#Overview}
### 1.1 Overview {#overview .section}
This is a cluster-specific convenience setup for enabling a
Kubernetes-style liveness/readiness test as outlined in
<https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/>.
Conceptually, it provides an application with means to:
- Run a collection of functions to validate sanity of the environment
- Set a no-error state before execution of the main routine
- Modify the error state during execution
- Invalidate all state when execution ends
[]{#Python-module}
### 1.2 Python module {#python-module .section}
Three python methods are provided.
[]{#load}
#### 1.2.1 load {#load .subsection}
This is meant to be called after configuration and the environment have
been set up, but before the execution logic has commenced.
It receives a list of externally defined fully-qualified python modules.
Each of these modules must implement the method `health(*args,**kwargs)`
in its global namespace.
Any module returning `False` will cause a `RuntimeException`.
The component will not trap any other exception from the modules.
If successful, it will write the `pid` of the application to the
specified run data folder. By default this is `/run/<HOSTNAME>`, but the
path can be modified if desired.
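A minimal sketch of calling `load` at startup, assuming a hypothetical
check module `mycheck` on the python path that implements
`health(*args, **kwargs)`:

```python
# sketch only: run liveness checks before handing over to the main routine
# "mycheck" is a hypothetical module implementing health(*args, **kwargs)
import liveness.linux

checks = ['mycheck']
# raises RuntimeError if any check returns False;
# on success writes the pid file under <rundir>/<namespace>/pid
liveness.linux.load(checks, namespace='my-service', rundir='/run')
```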
[]{#set}
#### 1.2.2 set {#set .subsection}
This is meant to be called during execution of the main program routine.
[]{#at-startup}
#### 1.2.2.1 at startup {#at-startup .subsubsection}
It should be called once at the *start* of execution of the main program
routine.
For one-shot routines, this would mean the start of any code only run
when the module name is `__main__`.
For daemons, it would be just before handing over execution to the main
loop.
[]{#during-execution}
#### 1.2.2.2 during execution {#during-execution .subsubsection}
Call `set(error=<error>, ...)` any time the health state temporarily
changes. Any `error` value other than `0` is considered an unhealthy
state.
[]{#at-shutdown}
#### 1.2.2.3 at shutdown {#at-shutdown .subsubsection}
Call `reset(...)`, which will indicate that the state is to be
considered the same as at startup.
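As a rough sketch of this lifecycle (after a successful `load`), a daemon
might drive `set` and `reset` as follows; `service_once` here is only a
stand-in for real work:

```python
# sketch only: set/reset around a daemon main loop; service_once is a stand-in
import time
import liveness.linux

def service_once():
    # placeholder for one unit of real work
    time.sleep(1)

liveness.linux.set(namespace='my-service', rundir='/run')  # no-error state at startup
try:
    while True:
        try:
            service_once()
            liveness.linux.set(error=0, namespace='my-service', rundir='/run')
        except Exception:
            # any failure marks the unit unhealthy until the next successful pass
            liveness.linux.set(error=1, namespace='my-service', rundir='/run')
finally:
    liveness.linux.reset(namespace='my-service', rundir='/run')  # invalidate state at shutdown
```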
[]{#shell}
### 1.3 shell {#shell .section}
A bash script is provided for *Kubernetes* to perform the health check.
It performs the following checks:
1. A numeric value exists in `<rundir>/<unitname>/pid`{.sample}.
2. The numeric value is a directory in `/proc`{.sample} (a valid pid)
3. The file `<rundir>/<unitname>/error`{.sample} contains \"0\"
If any of these checks fail, the container should be considered
unhealthy.
------------------------------------------------------------------------

View File

@ -0,0 +1,71 @@
@node liveness
@chapter liveness
@anchor{liveness_overview}
@section Overview
This is a cluster-specific convenience setup for enabling a Kubernetes-style liveness/readiness test as outlined in @url{https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/}.
Conceptually, it provides an application with means to:
@itemize
@item Run a collection of functions to validate sanity of the environment
@item Set a no-error state before execution of the main routine
@item Modify the error state during execution
@item Invalidate all state when execution ends
@end itemize
@section Python module
Three python methods are provided.
@subsection load
This is meant to be called after configuration and the environment have been set up, but before the execution logic has commenced.
It receives a list of externally defined fully-qualified python modules. Each of these modules must implement the method @code{health(*args,**kwargs)} in its global namespace.
Any module returning @code{False} will cause a @code{RuntimeException}.
The component will not trap any other exception from the modules.
If successful, it will write the @code{pid} of the application to the specified run data folder. By default this is @code{/run/<HOSTNAME>}, but the path can be modified if desired.
@subsection set
This is meant to be called during execution of the main program routine.
@subsubsection at startup
It should be called once at the @emph{start} of execution of the main program routine.
For one-shot routines, this would mean the start of any code only run when the module name is @code{__main__}.
For daemons, it would be just before handing over execution to the main loop.
@subsubsection during execution
Call @code{set(error=<error>, ...)} any time the health state temporarily changes. Any @code{error} value other than @code{0} is considered an unhealthy state.
@subsubsection at shutdown
Call @code{reset(...)}, which will indicate that the state is to be considered the same as at startup.
@section shell
A bash script is provided for @emph{Kubernetes} to perform the health check.
It performs the following checks:
@enumerate
@item A numeric value exists in @file{<rundir>/<unitname>/pid}.
@item The numeric value is a directory in @file{/proc} (a valid pid)
@item The file @file{<rundir>/<unitname>/error} contains "0"
@end enumerate
If any of these checks fail, the container should be considered unhealthy.

View File

@ -0,0 +1,35 @@
#!/bin/bash
rundir=${CIC_RUNDIR:-/run}
unit=${CIC_UNIT:-$HOSTNAME}
read p < $rundir/$unit/pid
if [ -z $p ]; then
>&2 echo unit $unit has no pid
exit 1
fi
if [ ! -d /proc/$p ]; then
>&2 echo unit $unit reports non-existent pid $p
exit 1
fi
>&2 echo unit $unit has pid $p
if [ ! -f $rundir/$unit/error ]; then
>&2 echo unit $unit has unspecified state
exit 1
fi
read e 2> /dev/null < $rundir/$unit/error
if [ -z $e ]; then
>&2 echo unit $unit has unspecified state
exit 1
fi
>&2 echo unit $unit has error $e
if [ $e -gt 0 ]; then
exit 1;
fi

View File

@ -0,0 +1,62 @@
# standard imports
import importlib
import sys
import os
import logging
logg = logging.getLogger().getChild(__name__)
pid = os.getpid()
default_namespace = os.environ.get('LIVENESS_UNIT_NAME')
if default_namespace is None:
import socket
default_namespace = socket.gethostname()
def load(check_strs, namespace=default_namespace, rundir='/run', *args, **kwargs):
if namespace is None:
import socket
namespace = socket.gethostname()
logg.info('pid ' + str(pid))
checks = []
for m in check_strs:
logg.debug('added liveness check: {}'.format(str(m)))
module = importlib.import_module(m)
checks.append(module)
for check in checks:
r = check.health(*args, **kwargs)
if r == False:
raise RuntimeError('liveness check {} failed'.format(str(check)))
logg.info('liveness check passed: {}'.format(str(check)))
app_rundir = os.path.join(rundir, namespace)
os.makedirs(app_rundir, exist_ok=True) # create the run dir for this namespace if it does not already exist
f = open(os.path.join(app_rundir, 'pid'), 'w')
f.write(str(pid))
f.close()
def set(error=0, namespace=default_namespace, rundir='/run'):
logg.info('liveness SET error {} for namespace {}'.format(error, namespace))
app_rundir = os.path.join(rundir, namespace)
f = open(os.path.join(app_rundir, 'error'), 'w')
f.write(str(error))
f.close()
def reset(namespace=default_namespace, rundir='/run'):
logg.info('liveness RESET for namespace {}'.format(namespace))
app_rundir = os.path.join(rundir, namespace)
try:
os.unlink(os.path.join(app_rundir, 'pid'))
except FileNotFoundError:
pass
try:
os.unlink(os.path.join(app_rundir, 'error'))
except FileNotFoundError:
pass

View File

@ -0,0 +1,7 @@
from setuptools import setup
setup(
name='liveness',
version='0.0.1a7',
packages=['liveness'],
include_package_data=True,
)

View File

@ -0,0 +1,17 @@
#!/bin/bash
export CIC_RUNDIR=`realpath ./tests/testdata/run`
t=`mktemp -d -p $CIC_RUNDIR`
export CIC_UNIT=`basename $t`
>&2 echo test pid $$
echo $$ > $t/pid
echo 0 > $t/error
. health.sh
echo 1 > $t/error
#unlink $t/error
. health.sh
echo if error this is not printed

View File

@ -0,0 +1,8 @@
a = ['foo']
kw = {
'bar': 42,
}
def health(*args, **kwargs):
return args[0] == a[0] and kwargs['bar'] == kw['bar']

View File

@ -0,0 +1,2 @@
def health(*args, **kwargs):
return False

View File

@ -0,0 +1,2 @@
def health(*args, **kwargs):
return True

View File

@ -0,0 +1,129 @@
# standard imports
import os
import unittest
import logging
import tempfile
import socket
# local imports
import liveness.linux
# test imports
import tests.imports
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()
script_dir = os.path.realpath(os.path.dirname(__file__))
data_dir = os.path.join(script_dir, 'testdata')
run_base_dir = os.path.join(data_dir, 'run')
class TestImports(unittest.TestCase):
def setUp(self):
os.makedirs(run_base_dir, exist_ok=True)
self.run_dir = tempfile.mkdtemp(dir=run_base_dir)
self.unit = 'unittest'
self.unit_dir = os.path.join(self.run_dir, self.unit)
self.pid_path = os.path.join(self.unit_dir, 'pid')
self.error_path = os.path.join(self.unit_dir, 'error')
self.host_path = os.path.join(self.run_dir, socket.gethostname())
def test_no_import(self):
liveness.linux.load([], namespace=self.unit, rundir=self.run_dir)
f = open(self.pid_path, 'r')
r = f.read()
f.close()
self.assertEqual(str(os.getpid()), r)
def test_hostname(self):
liveness.linux.load([], rundir=self.run_dir)
f = open(os.path.join(self.host_path, 'pid'), 'r')
r = f.read()
f.close()
self.assertEqual(str(os.getpid()), r)
def test_import_single_true(self):
checks = ['tests.imports.import_true']
liveness.linux.load(checks, namespace=self.unit, rundir=self.run_dir)
f = open(self.pid_path, 'r')
r = f.read()
f.close()
self.assertEqual(str(os.getpid()), r)
def test_import_single_false(self):
checks = ['tests.imports.import_false']
with self.assertRaises(RuntimeError):
liveness.linux.load(checks, namespace=self.unit, rundir=self.run_dir)
with self.assertRaises(FileNotFoundError):
os.stat(self.pid_path)
def test_import_false_then_true(self):
checks = ['tests.imports.import_false', 'tests.imports.import_true']
with self.assertRaises(RuntimeError):
liveness.linux.load(checks, namespace=self.unit, rundir=self.run_dir)
with self.assertRaises(FileNotFoundError):
os.stat(self.pid_path)
def test_import_multiple_true(self):
checks = ['tests.imports.import_true', 'tests.imports.import_true']
liveness.linux.load(checks, namespace=self.unit, rundir=self.run_dir)
f = open(self.pid_path, 'r')
r = f.read()
f.close()
self.assertEqual(str(os.getpid()), r)
def test_set(self):
liveness.linux.load([], namespace='unittest', rundir=self.run_dir)
liveness.linux.set(namespace='unittest', rundir=self.run_dir)
f = open(self.error_path, 'r')
r = f.read()
f.close()
self.assertEqual('0', r)
liveness.linux.set(error=42, namespace='unittest', rundir=self.run_dir)
f = open(self.error_path, 'r')
r = f.read()
f.close()
self.assertEqual('42', r)
liveness.linux.reset(namespace='unittest', rundir=self.run_dir)
with self.assertRaises(FileNotFoundError):
os.stat(self.error_path)
def test_set_hostname(self):
liveness.linux.load([], rundir=self.run_dir)
liveness.linux.set(rundir=self.run_dir)
error_path = os.path.join(self.host_path, 'error')
f = open(error_path, 'r')
r = f.read()
f.close()
self.assertEqual('0', r)
liveness.linux.reset(rundir=self.run_dir)
with self.assertRaises(FileNotFoundError):
os.stat(error_path)
def test_args(self):
checks = ['tests.imports.import_args']
aargs=['foo']
kwaargs={'bar': 42}
liveness.linux.load(checks, self.unit, self.run_dir, *aargs, **kwaargs)
f = open(self.pid_path, 'r')
r = f.read()
f.close()
self.assertEqual(str(os.getpid()), r)
if __name__ == '__main__':
unittest.main()

View File

@ -85,6 +85,7 @@ services:
# ETH_PROVIDER should be broken out into host/port but cic-eth expects this
ETH_PROVIDER: http://eth:8545
# And these two are for wait-for-it (could parse this)
DEV_USE_DOCKER_WAIT_SCRIPT: 1
ETH_PROVIDER_HOST: eth
ETH_PROVIDER_PORT: 8545
CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996}
@ -103,6 +104,9 @@ services:
DEV_PIP_EXTRA_INDEX_URL: ${DEV_PIP_EXTRA_INDEX_URL:-https://pip.grassrootseconomics.net:8433}
RUN_MASK: ${RUN_MASK:-0} # bit flags; 1: contract migrations 2: seed data
DEV_FAUCET_AMOUNT: ${DEV_FAUCET_AMOUNT:-0}
CIC_DEFAULT_TOKEN_SYMBOL: ${CIC_DEFAULT_TOKEN_SYMBOL:-GFT}
DEV_SARAFU_DEMURRAGE_LEVEL: ${DEV_SARAFU_DEMURRAGE_LEVEL:-196454828847045000000000000000000}
DEV_ETH_GAS_PRICE: ${DEV_ETH_GAS_PRICE:-1}
command: ["./run_job.sh"]
#command: ["./reset.sh"]
depends_on:
@ -236,6 +240,8 @@ services:
DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
DATABASE_DEBUG: ${DATABASE_DEBUG:-0}
DATABASE_POOL_SIZE: 0
REDIS_PORT: 6379
REDIS_HOST: redis
PGPASSWORD: ${DATABASE_PASSWORD:-tralala}
CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996}
BANCOR_DIR: ${BANCOR_DIR:-/usr/local/share/cic/bancor}
@ -245,6 +251,7 @@ services:
SIGNER_SECRET: ${SIGNER_SECRET:-deadbeef}
ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER: ${DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER:-0xACB0BC74E1686D62dE7DC6414C999EA60C09F0eA}
TASKS_TRACE_QUEUE_STATUS: ${TASKS_TRACE_QUEUE_STATUS:-1}
CIC_DEFAULT_TOKEN_SYMBOL: ${CIC_DEFAULT_TOKEN_SYMBOL:-GFT}
depends_on:
- eth
- postgres