commit 55a4d14940
Merge branch 'master' into lash/ci-for-scripts
@@ -6,3 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=postgresql
 DRIVER=psycopg2
+DEBUG=

@@ -6,3 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=sqlite
 DRIVER=pysqlite
+DEBUG=

@@ -2,9 +2,14 @@
 import logging

 # local imports
-from .list import list_transactions_mined
-from .list import list_transactions_account_mined
-from .list import add_transaction
+from .list import (
+    list_transactions_mined,
+    list_transactions_account_mined,
+    add_transaction,
+    tag_transaction,
+    add_tag,
+)


 logg = logging.getLogger()

@@ -2,8 +2,9 @@
 import logging
 import datetime

-# third-party imports
+# external imports
 from cic_cache.db.models.base import SessionBase
+from sqlalchemy import text

 logg = logging.getLogger()

@@ -50,7 +51,8 @@ def list_transactions_account_mined(


 def add_transaction(
-        session, tx_hash,
+        session,
+        tx_hash,
         block_number,
         tx_index,
         sender,
@@ -62,6 +64,33 @@ def add_transaction(
         success,
         timestamp,
         ):
+    """Adds a single transaction to the cache persistent storage. Sensible interpretation of all fields is the responsibility of the caller.
+
+    :param session: Persistent storage session object
+    :type session: SQLAlchemy session
+    :param tx_hash: Transaction hash
+    :type tx_hash: str, 0x-hex
+    :param block_number: Block number
+    :type block_number: int
+    :param tx_index: Transaction index in block
+    :type tx_index: int
+    :param sender: Ethereum address of effective sender
+    :type sender: str, 0x-hex
+    :param receiver: Ethereum address of effective recipient
+    :type receiver: str, 0x-hex
+    :param source_token: Ethereum address of token used by sender
+    :type source_token: str, 0x-hex
+    :param destination_token: Ethereum address of token received by recipient
+    :type destination_token: str, 0x-hex
+    :param from_value: Source token value spent in transaction
+    :type from_value: int
+    :param to_value: Destination token value received in transaction
+    :type to_value: int
+    :param success: True if code execution on network was successful
+    :type success: bool
+    :param date_block: Block timestamp
+    :type date_block: datetime
+    """
     date_block = datetime.datetime.fromtimestamp(timestamp)
     s = "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, source_token, destination_token, from_value, to_value, success, date_block) VALUES ('{}', {}, {}, '{}', '{}', '{}', '{}', {}, {}, {}, '{}')".format(
         tx_hash,
@@ -77,3 +106,74 @@ def add_transaction(
         date_block,
     )
     session.execute(s)
+
+
+def tag_transaction(
+        session,
+        tx_hash,
+        name,
+        domain=None,
+        ):
+    """Tag a single transaction with a single tag.
+
+    Tag must already exist in storage.
+
+    :param session: Persistent storage session object
+    :type session: SQLAlchemy session
+    :param tx_hash: Transaction hash
+    :type tx_hash: str, 0x-hex
+    :param name: Tag value
+    :type name: str
+    :param domain: Tag domain
+    :type domain: str
+    :raises ValueError: Unknown tag or transaction hash
+
+    """
+
+    s = text("SELECT id from tx where tx_hash = :a")
+    r = session.execute(s, {'a': tx_hash}).fetchall()
+    tx_id = r[0].values()[0]
+
+    if tx_id == None:
+        raise ValueError('unknown tx hash {}'.format(tx_hash))
+
+    #s = text("SELECT id from tag where value = :a and domain = :b")
+    if domain == None:
+        s = text("SELECT id from tag where value = :a")
+    else:
+        s = text("SELECT id from tag where value = :a and domain = :b")
+    r = session.execute(s, {'a': name, 'b': domain}).fetchall()
+    tag_id = r[0].values()[0]
+
+    logg.debug('type {} {}'.format(type(tag_id), type(tx_id)))
+
+    if tag_id == None:
+        raise ValueError('unknown tag name {} domain {}'.format(name, domain))
+
+    s = text("INSERT INTO tag_tx_link (tag_id, tx_id) VALUES (:a, :b)")
+    r = session.execute(s, {'a': int(tag_id), 'b': int(tx_id)})
+
+
+def add_tag(
+        session,
+        name,
+        domain=None,
+        ):
+    """Add a single tag to storage.
+
+    :param session: Persistent storage session object
+    :type session: SQLAlchemy session
+    :param name: Tag value
+    :type name: str
+    :param domain: Tag domain
+    :type domain: str
+    :raises sqlalchemy.exc.IntegrityError: Tag already exists
+    """
+
+    s = None
+    if domain == None:
+        s = text("INSERT INTO tag (value) VALUES (:b)")
+    else:
+        s = text("INSERT INTO tag (domain, value) VALUES (:a, :b)")
+    session.execute(s, {'a': domain, 'b': name})
@@ -0,0 +1,38 @@
+"""Transaction tags
+
+Revision ID: aaf2bdce7d6e
+Revises: 6604de4203e2
+Create Date: 2021-05-01 09:20:20.775082
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'aaf2bdce7d6e'
+down_revision = '6604de4203e2'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.create_table(
+        'tag',
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('domain', sa.String(), nullable=True),
+        sa.Column('value', sa.String(), nullable=False),
+    )
+    op.create_index('idx_tag_domain_value', 'tag', ['domain', 'value'], unique=True)
+
+    op.create_table(
+        'tag_tx_link',
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('tag_id', sa.Integer, sa.ForeignKey('tag.id'), nullable=False),
+        sa.Column('tx_id', sa.Integer, sa.ForeignKey('tx.id'), nullable=False),
+    )
+
+
+def downgrade():
+    op.drop_table('tag_tx_link')
+    op.drop_index('idx_tag_domain_value')
+    op.drop_table('tag')
@@ -1,2 +1,27 @@
-class SyncFilter:
-    pass
+class TagSyncFilter:
+    """Holds tag name and domain for an implementing filter.
+
+    :param name: Tag value
+    :type name: str
+    :param domain: Tag domain
+    :type domain: str
+    """
+
+    def __init__(self, name, domain=None):
+        self.tag_name = name
+        self.tag_domain = domain
+
+
+    def tag(self):
+        """Return tag value/domain.
+
+        :rtype: Tuple
+        :returns: tag value/domain.
+        """
+        return (self.tag_name, self.tag_domain)
+
+
+    def __str__(self):
+        if self.tag_domain == None:
+            return self.tag_name
+        return '{}.{}'.format(self.tag_domain, self.tag_name)
@@ -2,7 +2,6 @@
 import logging

 # external imports
-from chainlib.eth.erc20 import ERC20
 from chainlib.eth.address import (
     to_checksum_address,
 )

@@ -13,17 +12,19 @@ from cic_eth_registry.error import (
     NotAContractError,
     ContractMismatchError,
 )
+from eth_erc20 import ERC20

 # local imports
-from .base import SyncFilter
+from .base import TagSyncFilter
 from cic_cache import db as cic_cache_db

 logg = logging.getLogger().getChild(__name__)


-class ERC20TransferFilter(SyncFilter):
+class ERC20TransferFilter(TagSyncFilter):

     def __init__(self, chain_spec):
+        super(ERC20TransferFilter, self).__init__('transfer', domain='erc20')
         self.chain_spec = chain_spec

@@ -46,6 +47,9 @@ class ERC20TransferFilter(SyncFilter):
         except RequestMismatchException:
             logg.debug('erc20 match but not a transfer, skipping')
             return False
+        except ValueError:
+            logg.debug('erc20 match but bogus data, skipping')
+            return False

         token_sender = tx.outputs[0]
         token_recipient = transfer_data[0]

@@ -67,7 +71,13 @@ class ERC20TransferFilter(SyncFilter):
             tx.status == Status.SUCCESS,
             block.timestamp,
         )
-        #db_session.flush()
+        db_session.flush()
+        cic_cache_db.tag_transaction(
+            db_session,
+            tx.hash,
+            self.tag_name,
+            domain=self.tag_domain,
+        )
         db_session.commit()

         return True
@@ -7,9 +7,10 @@ import argparse
 import sys
 import re

-# third-party imports
+# external imports
 import confini
 import celery
+import sqlalchemy
 import rlp
 import cic_base.config
 import cic_base.log

@@ -34,7 +35,10 @@ from chainsyncer.driver import (
 from chainsyncer.db.models.base import SessionBase

 # local imports
-from cic_cache.db import dsn_from_config
+from cic_cache.db import (
+    dsn_from_config,
+    add_tag,
+)
 from cic_cache.runnable.daemons.filters import (
     ERC20TransferFilter,
 )

@@ -59,6 +63,17 @@ chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
 cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))


+def register_filter_tags(filters, session):
+    for f in filters:
+        tag = f.tag()
+        try:
+            add_tag(session, tag[0], domain=tag[1])
+            session.commit()
+            logg.info('added tag name "{}" domain "{}"'.format(tag[0], tag[1]))
+        except sqlalchemy.exc.IntegrityError:
+            logg.debug('already have tag name "{}" domain "{}"'.format(tag[0], tag[1]))
+
+
 def main():
     # Connect to blockchain with chainlib
     rpc = RPCConnection.connect(chain_spec, 'default')

@@ -98,10 +113,19 @@ def main():

     erc20_transfer_filter = ERC20TransferFilter(chain_spec)

+    filters = [
+        erc20_transfer_filter,
+    ]
+
+    session = SessionBase.create_session()
+    register_filter_tags(filters, session)
+    session.close()
+
     i = 0
     for syncer in syncers:
         logg.debug('running syncer index {}'.format(i))
-        syncer.add_filter(erc20_transfer_filter)
+        for f in filters:
+            syncer.add_filter(f)
         r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc)
         sys.stderr.write("sync {} done at block {}\n".format(syncer, r))


@@ -6,4 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=postgresql
 DRIVER=psycopg2
-DEBUG=
+DEBUG=0

@@ -1,2 +1,4 @@
 [cic]
 registry_address =
+chain_spec =
+trust_address =

@@ -6,4 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=sqlite
 DRIVER=pysqlite
-DEBUG=
+DEBUG=1

@@ -43,10 +43,6 @@ COPY cic-cache/config/ /usr/local/etc/cic-cache/
 RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
 COPY cic-cache/cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/

-RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
-	mkdir -p /usr/local/share/cic/solidity && \
-	cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
-
 COPY cic-cache/docker/start_tracker.sh ./start_tracker.sh
 COPY cic-cache/docker/db.sh ./db.sh
 RUN chmod 755 ./*.sh
@@ -1,12 +1,12 @@
-cic-base~=0.1.2a77
+cic-base~=0.1.2b8
 alembic==1.4.2
 confini~=0.3.6rc3
 uwsgi==2.0.19.1
 moolb~=0.1.0
-cic-eth-registry~=0.5.4a16
+cic-eth-registry~=0.5.5a4
 SQLAlchemy==1.3.20
 semver==2.13.0
 psycopg2==2.8.6
 celery==4.4.7
 redis==3.5.3
-chainsyncer[sql]~=0.0.2a2
+chainsyncer[sql]~=0.0.2a4

@@ -4,3 +4,8 @@ pytest-mock==3.3.1
 pysqlite3==0.4.3
 sqlparse==0.4.1
 pytest-celery==0.0.0a1
+eth_tester==0.5.0b3
+py-evm==0.3.0a20
+web3==5.12.2
+cic-eth-registry~=0.5.5a3
+cic-base[full]==0.1.2b8

@@ -3,7 +3,7 @@ import os
 import sys
 import datetime

-# third-party imports
+# external imports
 import pytest

 # local imports

@@ -84,3 +84,7 @@ def txs(

     session.commit()

+    return [
+        tx_hash_first,
+        tx_hash_second,
+    ]

apps/cic-cache/tests/filters/conftest.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+from chainlib.eth.pytest import *
+from cic_eth_registry.pytest.fixtures_tokens import *
+

apps/cic-cache/tests/filters/test_erc20.py (new file, 69 lines)
@@ -0,0 +1,69 @@
+# standard imports
+import os
+import datetime
+import logging
+import json
+
+# external imports
+import pytest
+from sqlalchemy import text
+from chainlib.eth.tx import Tx
+from chainlib.eth.block import Block
+from chainlib.chain import ChainSpec
+from hexathon import (
+    strip_0x,
+    add_0x,
+)
+
+# local imports
+from cic_cache.db import add_tag
+from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
+
+logg = logging.getLogger()
+
+
+def test_cache(
+        eth_rpc,
+        foo_token,
+        init_database,
+        list_defaults,
+        list_actors,
+        tags,
+        ):
+
+    chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
+
+    fltr = ERC20TransferFilter(chain_spec)
+
+    add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
+
+    data = 'a9059cbb'
+    data += strip_0x(list_actors['alice'])
+    data += '1000'.ljust(64, '0')
+
+    block = Block({
+        'hash': os.urandom(32).hex(),
+        'number': 42,
+        'timestamp': datetime.datetime.utcnow().timestamp(),
+        'transactions': [],
+        })
+
+    tx = Tx({
+        'to': foo_token,
+        'from': list_actors['bob'],
+        'data': data,
+        'value': 0,
+        'hash': os.urandom(32).hex(),
+        'nonce': 13,
+        'gasPrice': 10000000,
+        'gas': 123456,
+        })
+    block.txs.append(tx)
+    tx.block = block
+
+    r = fltr.filter(eth_rpc, block, tx, db_session=init_database)
+    assert r
+
+    s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
+    r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
+    assert r[0] == tx.hash

@@ -2,7 +2,7 @@
 import os
 import logging

-# third-party imports
+# external imports
 import pytest
 import confini

@@ -13,7 +13,7 @@ logg = logging.getLogger(__file__)

 @pytest.fixture(scope='session')
 def load_config():
-    config_dir = os.path.join(root_dir, '.config/test')
+    config_dir = os.path.join(root_dir, 'config/test')
     conf = confini.Config(config_dir, 'CICTEST')
     conf.process()
     logg.debug('config {}'.format(conf))

@@ -3,13 +3,16 @@ import os
 import logging
 import re

-# third-party imports
+# external imports
 import pytest
 import sqlparse
+import alembic
+from alembic.config import Config as AlembicConfig

 # local imports
 from cic_cache.db.models.base import SessionBase
 from cic_cache.db import dsn_from_config
+from cic_cache.db import add_tag

 logg = logging.getLogger(__file__)

@@ -26,11 +29,10 @@ def database_engine(
     except FileNotFoundError:
         pass
     dsn = dsn_from_config(load_config)
-    SessionBase.connect(dsn)
+    SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
     return dsn


-# TODO: use alembic instead to migrate db, here we have to keep separate schema than migration script in script/migrate.py
 @pytest.fixture(scope='function')
 def init_database(
     load_config,
@@ -38,52 +40,23 @@ def init_database(
         ):

     rootdir = os.path.dirname(os.path.dirname(__file__))
-    schemadir = os.path.join(rootdir, 'db', load_config.get('DATABASE_DRIVER'))
-    if load_config.get('DATABASE_ENGINE') == 'sqlite':
-        rconn = SessionBase.engine.raw_connection()
-        f = open(os.path.join(schemadir, 'db.sql'))
-        s = f.read()
-        f.close()
-        rconn.executescript(s)
-
-    else:
-        rconn = SessionBase.engine.raw_connection()
-        rcursor = rconn.cursor()
-
-        #rcursor.execute('DROP FUNCTION IF EXISTS public.transaction_list')
-        #rcursor.execute('DROP FUNCTION IF EXISTS public.balances')
-
-        f = open(os.path.join(schemadir, 'db.sql'))
-        s = f.read()
-        f.close()
-        r = re.compile(r'^[A-Z]', re.MULTILINE)
-        for l in sqlparse.parse(s):
-            strl = str(l)
-            # we need to check for empty query lines, as sqlparse doesn't do that on its own (and psycopg complains when it gets them)
-            if not re.search(r, strl):
-                logg.warning('skipping parsed query line {}'.format(strl))
-                continue
-            rcursor.execute(strl)
-        rconn.commit()
-
-        rcursor.execute('SET search_path TO public')
-
-        # this doesn't work when run separately, no idea why
-        # functions have been manually added to original schema from cic-eth
-        # f = open(os.path.join(schemadir, 'proc_transaction_list.sql'))
-        # s = f.read()
-        # f.close()
-        # rcursor.execute(s)
-        #
-        # f = open(os.path.join(schemadir, 'proc_balances.sql'))
-        # s = f.read()
-        # f.close()
-        # rcursor.execute(s)
-
-        rcursor.close()
+    dbdir = os.path.join(rootdir, 'cic_cache', 'db')
+    migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
+    if not os.path.isdir(migrationsdir):
+        migrationsdir = os.path.join(dbdir, 'migrations', 'default')
+    logg.info('using migrations directory {}'.format(migrationsdir))

     session = SessionBase.create_session()

+    ac = AlembicConfig(os.path.join(migrationsdir, 'alembic.ini'))
+    ac.set_main_option('sqlalchemy.url', database_engine)
+    ac.set_main_option('script_location', migrationsdir)
+
+    alembic.command.downgrade(ac, 'base')
+    alembic.command.upgrade(ac, 'head')
+
+    session.commit()
+
     yield session
     session.commit()
     session.close()
@@ -116,3 +89,14 @@ def list_defaults(
     return {
         'block': 420000,
     }
+
+
+@pytest.fixture(scope='function')
+def tags(
+        init_database,
+        ):
+
+    add_tag(init_database, 'foo')
+    add_tag(init_database, 'baz', domain='bar')
+    add_tag(init_database, 'xyzzy', domain='bar')
+    init_database.commit()

@@ -4,7 +4,7 @@ import datetime
 import logging
 import json

-# third-party imports
+# external imports
 import pytest

 # local imports

apps/cic-cache/tests/test_tag.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+import os
+import datetime
+import logging
+import json
+
+# external imports
+import pytest
+
+# local imports
+from cic_cache.db import tag_transaction
+
+logg = logging.getLogger()
+
+
+def test_cache(
+        init_database,
+        list_defaults,
+        list_actors,
+        list_tokens,
+        txs,
+        tags,
+        ):
+
+    tag_transaction(init_database, txs[0], 'foo')
+    tag_transaction(init_database, txs[0], 'baz', domain='bar')
+    tag_transaction(init_database, txs[1], 'xyzzy', domain='bar')
+
+    r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.value = 'foo'").fetchall()
+    assert r[0][0] == txs[0]
+
+
+    r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'baz'").fetchall()
+    assert r[0][0] == txs[0]
+
+
+    r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'xyzzy'").fetchall()
+    assert r[0][0] == txs[1]
@@ -3,7 +3,7 @@ import logging

 # external imports
 import celery
-from erc20_single_shot_faucet import SingleShotFaucet as Faucet
+from erc20_faucet import Faucet
 from hexathon import (
     strip_0x,
 )

@@ -20,8 +20,8 @@ from chainlib.eth.tx import (
 )
 from chainlib.chain import ChainSpec
 from chainlib.error import JSONRPCException
-from eth_accounts_index import AccountRegistry
-from sarafu_faucet import MinterFaucet as Faucet
+from eth_accounts_index.registry import AccountRegistry # TODO, use interface module instead (needs gas limit method)
+from sarafu_faucet import MinterFaucet
 from chainqueue.db.models.tx import TxCache

 # local import

@@ -127,13 +127,13 @@ def register(self, account_address, chain_spec_dict, writer_address=None):
     if writer_address == ZERO_ADDRESS:
         session.close()
         raise RoleMissingError('call address for resgistering {}'.format(account_address))
-    account_registry_address = registry.by_name('AccountRegistry', sender_address=call_address)
+    account_registry_address = registry.by_name('AccountsIndex', sender_address=call_address)

     # Generate and sign transaction
     rpc_signer = RPCConnection.connect(chain_spec, 'signer')
     nonce_oracle = CustodialTaskNonceOracle(writer_address, self.request.root_id, session=session) #, default_nonce)
-    gas_oracle = self.create_gas_oracle(rpc, AccountRegistry.gas)
-    account_registry = AccountRegistry(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
+    gas_oracle = self.create_gas_oracle(rpc, AccountsIndex.gas)
+    account_registry = AccountsIndex(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
     (tx_hash_hex, tx_signed_raw_hex) = account_registry.add(account_registry_address, writer_address, account_address, tx_format=TxFormat.RLP_SIGNED)
     rpc_signer.disconnect()

@@ -185,7 +185,7 @@ def gift(self, account_address, chain_spec_dict):
     # Generate and sign transaction
     rpc_signer = RPCConnection.connect(chain_spec, 'signer')
     nonce_oracle = CustodialTaskNonceOracle(account_address, self.request.root_id, session=session) #, default_nonce)
-    gas_oracle = self.create_gas_oracle(rpc, Faucet.gas)
+    gas_oracle = self.create_gas_oracle(rpc, MinterFaucet.gas)
     faucet = Faucet(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
     (tx_hash_hex, tx_signed_raw_hex) = faucet.give_to(faucet_address, account_address, account_address, tx_format=TxFormat.RLP_SIGNED)
     rpc_signer.disconnect()

@@ -338,7 +338,7 @@ def cache_account_data(
     chain_spec = ChainSpec.from_dict(chain_spec_dict)
     tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
     tx = unpack(tx_signed_raw_bytes, chain_spec)
-    tx_data = AccountRegistry.parse_add_request(tx['data'])
+    tx_data = AccountsIndex.parse_add_request(tx['data'])

     session = SessionBase.create_session()
     tx_cache = TxCache(

@@ -6,7 +6,6 @@ import celery
 from chainlib.eth.constant import ZERO_ADDRESS
 from chainlib.chain import ChainSpec
 from chainlib.connection import RPCConnection
-from chainlib.eth.erc20 import ERC20
 from chainlib.eth.tx import (
     TxFormat,
     unpack,

@@ -16,6 +15,7 @@ from cic_eth_registry.erc20 import ERC20Token
 from hexathon import strip_0x
 from chainqueue.db.models.tx import TxCache
 from chainqueue.error import NotLocalTxError
+from eth_erc20 import ERC20

 # local imports
 from cic_eth.db.models.base import SessionBase

@@ -7,7 +7,7 @@ from chainlib.chain import ChainSpec
 from chainlib.connection import RPCConnection
 from chainlib.eth.constant import ZERO_ADDRESS
 from cic_eth_registry import CICRegistry
-from eth_address_declarator import AddressDeclarator
+from eth_address_declarator import Declarator

 # local imports
 from cic_eth.task import BaseTask

@@ -23,12 +23,12 @@ def translate_address(address, trusted_addresses, chain_spec, sender_address=ZERO_ADDRESS):
     registry = CICRegistry(chain_spec, rpc)

     declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address)
-    c = AddressDeclarator(chain_spec)
+    c = Declarator(chain_spec)

     for trusted_address in trusted_addresses:
         o = c.declaration(declarator_address, trusted_address, address, sender_address=sender_address)
         r = rpc.do(o)
-        declaration_hex = AddressDeclarator.parse_declaration(r)
+        declaration_hex = Declarator.parse_declaration(r)
         declaration_hex = declaration_hex[0].rstrip('0')
         declaration_bytes = bytes.fromhex(declaration_hex)
         declaration = None

@@ -14,13 +14,13 @@ from chainlib.eth.tx import (
 )
 from chainlib.eth.block import block_by_number
 from chainlib.eth.contract import abi_decode_single
-from chainlib.eth.erc20 import ERC20
 from hexathon import strip_0x
 from cic_eth_registry import CICRegistry
 from cic_eth_registry.erc20 import ERC20Token
 from chainqueue.db.models.otx import Otx
 from chainqueue.db.enum import StatusEnum
 from chainqueue.query import get_tx_cache
+from eth_erc20 import ERC20

 # local imports
 from cic_eth.queue.time import tx_times

@@ -13,9 +13,7 @@ from hexathon import (
     strip_0x,
     add_0x,
 )
-# TODO: use sarafu_Faucet for both when inheritance has been implemented
-from erc20_single_shot_faucet import SingleShotFaucet
-from sarafu_faucet import MinterFaucet as Faucet
+from erc20_faucet import Faucet

 # local imports
 from .base import SyncFilter

@@ -71,14 +69,13 @@ class CallbackFilter(SyncFilter):
             #transfer_data['token_address'] = tx.inputs[0]
             faucet_contract = tx.inputs[0]

-            c = SingleShotFaucet(self.chain_spec)
-            o = c.token(faucet_contract, sender_address=self.caller_address)
+            o = Faucet.token(faucet_contract, sender_address=self.caller_address)
             r = conn.do(o)
             transfer_data['token_address'] = add_0x(c.parse_token(r))

-            o = c.amount(faucet_contract, sender_address=self.caller_address)
+            o = c.token_amount(faucet_contract, sender_address=self.caller_address)
             r = conn.do(o)
-            transfer_data['value'] = c.parse_amount(r)
+            transfer_data['value'] = c.parse_token_amount(r)

             return ('tokengift', transfer_data)


@@ -36,6 +36,7 @@ from cic_eth.eth import (
 from cic_eth.admin import (
     debug,
     ctrl,
+    token
 )
 from cic_eth.queue import (
     query,

@@ -10,7 +10,7 @@ version = (
     0,
     11,
     0,
-    'beta.11',
+    'beta.12',
 )

 version_object = semver.VersionInfo(

@@ -29,7 +29,7 @@ RUN /usr/local/bin/python -m pip install --upgrade pip
 # python merge_requirements.py | tee merged_requirements.txt
 #RUN cd cic-base && \
 #	pip install $pip_extra_index_url_flag -r ./merged_requirements.txt
-RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a77
+RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b8

 COPY cic-eth/scripts/ scripts/
 COPY cic-eth/setup.cfg cic-eth/setup.py ./

@@ -50,8 +50,4 @@ COPY cic-eth/config/ /usr/local/etc/cic-eth/
 COPY cic-eth/cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
 COPY cic-eth/crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/

-RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
-	mkdir -p /usr/local/share/cic/solidity && \
-	cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
-
 COPY util/liveness/health.sh /usr/local/bin/health.sh
@@ -1,25 +1,24 @@
-cic-base==0.1.2b5
+cic-base==0.1.2b8
 celery==4.4.7
 crypto-dev-signer~=0.4.14b3
 confini~=0.3.6rc3
-cic-eth-registry~=0.5.4a16
-#cic-bancor~=0.0.6
+cic-eth-registry~=0.5.5a4
 redis==3.5.3
 alembic==1.4.2
 websockets==8.1
 requests~=2.24.0
-eth_accounts_index~=0.0.11a9
-erc20-transfer-authorization~=0.3.1a5
+eth_accounts_index~=0.0.11a11
+erc20-transfer-authorization~=0.3.1a6
 uWSGI==2.0.19.1
 semver==2.13.0
 websocket-client==0.57.0
 moolb~=0.1.1b2
-eth-address-index~=0.1.1a9
-chainlib~=0.0.2a20
+eth-address-index~=0.1.1a11
+chainlib~=0.0.3a1
 hexathon~=0.0.1a7
-chainsyncer[sql]~=0.0.2a2
+chainsyncer[sql]~=0.0.2a4
 chainqueue~=0.0.2a2
-pysha3==1.0.2
+sarafu-faucet==0.0.3a1
 coincurve==15.0.0
-sarafu-faucet==0.0.2a28
+sarafu-faucet==0.0.3a2
 potaahto~=0.0.1a1

@@ -2,7 +2,7 @@
 import os
 import logging

-# third-party imports
+# external imports
 import pytest
 import alembic
 from alembic.config import Config as AlembicConfig

@@ -1,5 +1,5 @@
 [pgp]
-exports_dir = pgp
+exports_dir = /root/pgp
 privatekey_file = privatekeys.asc
 passphrase = merman
 publickey_trusted_file = publickeys.asc

apps/cic-meta/.gitignore (vendored, 1 line)
@@ -3,4 +3,3 @@ dist
 dist-web
 dist-server
 scratch
-tests

@@ -2,26 +2,31 @@ FROM node:15.3.0-alpine3.10

 WORKDIR /tmp/src/cic-meta

+RUN apk add --no-cache postgresql bash
+
 COPY cic-meta/package.json \
 	./

 COPY cic-meta/src/ src/
 COPY cic-meta/tests/ tests/
 COPY cic-meta/scripts/ scripts/
-#COPY docker/*.sh /root/
-
-RUN alias tsc=node_modules/typescript/bin/tsc

 RUN npm install

+# see exports_dir gpg.ini
+COPY cic-meta/tests/*.asc /root/pgp/
+RUN alias tsc=node_modules/typescript/bin/tsc

 COPY cic-meta/.config/ /usr/local/etc/cic-meta/
 # COPY cic-meta/scripts/server/initdb/server.postgres.sql /usr/local/share/cic-meta/sql/server.sql

 COPY cic-meta/docker/db.sh ./db.sh
 RUN chmod 755 ./db.sh

-RUN alias ts-node=/tmp/src/cic-meta/node_modules/ts-node/dist/bin.js
-ENTRYPOINT [ "./node_modules/ts-node/dist/bin.js", "./scripts/server/server.ts" ]
+#RUN alias ts-node=/tmp/src/cic-meta/node_modules/ts-node/dist/bin.js
+#ENTRYPOINT [ "./node_modules/ts-node/dist/bin.js", "./scripts/server/server.ts" ]

-# COPY cic-meta/docker/start_server.sh ./start_server.sh
-# RUN chmod 755 ./start_server.sh
+COPY cic-meta/docker/start_server.sh ./start_server.sh
+RUN chmod 755 ./start_server.sh
+
+ENTRYPOINT ["sh", "./start_server.sh"]

@@ -1,3 +1,6 @@
 #!/bin/bash
+set -e
+
+PGPASSWORD=$DATABASE_PASSWORD psql -v ON_ERROR_STOP=1 -U $DATABASE_USER -h $DATABASE_HOST -p $DATABASE_PORT -d $DATABASE_NAME -f $SCHEMA_SQL_PATH

-PGPASSWORD=$DATABASE_PASSWORD psql -U $DATABASE_USER -h $DATABASE_HOST -p $DATABASE_PORT -d $DATABASE_NAME /usr/local/share/cic-meta/sql/server.sql

@@ -1,3 +1,9 @@
+#!/bin/bash
+set -euo pipefail
+
+# db migration
 sh ./db.sh

-/usr/local/bin/node /usr/local/bin/cic-meta-server $@
+# /usr/local/bin/node /usr/local/bin/cic-meta-server $@
+# ./node_modules/ts-node/dist/bin.js", "./scripts/server/server.ts $@
+npm run start "$@"

apps/cic-meta/package-lock.json (generated, 101 lines)
@@ -1,6 +1,6 @@
 {
   "name": "cic-client-meta",
-  "version": "0.0.7-alpha.2",
+  "version": "0.0.7-alpha.7",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {

@@ -852,6 +852,75 @@
         "printj": "~1.1.0"
       }
     },
+    "crdt-meta": {
+      "version": "0.0.8",
+      "resolved": "https://registry.npmjs.org/crdt-meta/-/crdt-meta-0.0.8.tgz",
+      "integrity": "sha512-CS0sS0L2QWthz7vmu6vzl3p4kcpJ+IKILBJ4tbgN4A3iNG8wnBeuDIv/z3KFFQjcfuP4QAh6E9LywKUTxtDc3g==",
+      "requires": {
+        "automerge": "^0.14.2",
+        "ini": "^1.3.8",
+        "openpgp": "^4.10.8",
+        "pg": "^8.5.1",
+        "sqlite3": "^5.0.2"
+      },
+      "dependencies": {
+        "automerge": {
+          "version": "0.14.2",
+          "resolved": "https://registry.npmjs.org/automerge/-/automerge-0.14.2.tgz",
+          "integrity": "sha512-shiwuJHCbNRI23WZyIECLV4Ovf3WiAFJ7P9BH4l5gON1In/UUbjcSJKRygtIirObw2UQumeYxp3F2XBdSvQHnA==",
+          "requires": {
+            "immutable": "^3.8.2",
+            "transit-immutable-js": "^0.7.0",
+            "transit-js": "^0.8.861",
+            "uuid": "^3.4.0"
+          }
+        },
+        "node-addon-api": {
+          "version": "3.1.0",
+          "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.1.0.tgz",
+          "integrity": "sha512-flmrDNB06LIl5lywUz7YlNGZH/5p0M7W28k8hzd9Lshtdh1wshD2Y+U4h9LD6KObOy1f+fEVdgprPrEymjM5uw=="
+        },
+        "pg": {
+          "version": "8.6.0",
+          "resolved": "https://registry.npmjs.org/pg/-/pg-8.6.0.tgz",
+          "integrity": "sha512-qNS9u61lqljTDFvmk/N66EeGq3n6Ujzj0FFyNMGQr6XuEv4tgNTXvJQTfJdcvGit5p5/DWPu+wj920hAJFI+QQ==",
+          "requires": {
+            "buffer-writer": "2.0.0",
+            "packet-reader": "1.0.0",
+            "pg-connection-string": "^2.5.0",
+            "pg-pool": "^3.3.0",
+            "pg-protocol": "^1.5.0",
+            "pg-types": "^2.1.0",
+            "pgpass": "1.x"
+          }
+        },
+        "pg-connection-string": {
+          "version": "2.5.0",
+          "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz",
+          "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ=="
+        },
+        "pg-pool": {
+          "version": "3.3.0",
+          "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.3.0.tgz",
+          "integrity": "sha512-0O5huCql8/D6PIRFAlmccjphLYWC+JIzvUhSzXSpGaf+tjTZc4nn+Lr7mLXBbFJfvwbP0ywDv73EiaBsxn7zdg=="
+        },
+        "pg-protocol": {
+          "version": "1.5.0",
+          "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz",
+          "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ=="
+        },
+        "sqlite3": {
+          "version": "5.0.2",
+          "resolved": "https://registry.npmjs.org/sqlite3/-/sqlite3-5.0.2.tgz",
+          "integrity": "sha512-1SdTNo+BVU211Xj1csWa8lV6KM0CtucDwRyA0VHl91wEH1Mgh7RxUpI4rVvG7OhHrzCSGaVyW5g8vKvlrk9DJA==",
+          "requires": {
+            "node-addon-api": "^3.0.0",
+            "node-gyp": "3.x",
+            "node-pre-gyp": "^0.11.0"
+          }
+        }
+      }
+    },
     "create-hash": {
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz",

@@ -966,17 +1035,17 @@
       "dev": true
     },
     "elliptic": {
-      "version": "6.5.3",
-      "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.3.tgz",
-      "integrity": "sha512-IMqzv5wNQf+E6aHeIqATs0tOLeOTwj1QKbRcS3jBbYkl5oLAserA8yJTT7/VyHUYG91PRmPyeQDObKLPpeS4dw==",
+      "version": "6.5.4",
+      "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz",
+      "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==",
       "requires": {
-        "bn.js": "^4.4.0",
-        "brorand": "^1.0.1",
+        "bn.js": "^4.11.9",
+        "brorand": "^1.1.0",
         "hash.js": "^1.0.0",
-        "hmac-drbg": "^1.0.0",
-        "inherits": "^2.0.1",
-        "minimalistic-assert": "^1.0.0",
-        "minimalistic-crypto-utils": "^1.0.0"
+        "hmac-drbg": "^1.0.1",
+        "inherits": "^2.0.4",
+        "minimalistic-assert": "^1.0.1",
+        "minimalistic-crypto-utils": "^1.0.1"
       }
     },
     "emoji-regex": {

@@ -1489,9 +1558,9 @@
       "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
     },
     "ini": {
-      "version": "1.3.5",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
-      "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw=="
+      "version": "1.3.8",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
+      "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
     },
     "interpret": {
       "version": "2.2.0",

@@ -1957,9 +2026,9 @@
       }
     },
     "y18n": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz",
-      "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==",
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz",
+      "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==",
       "dev": true
     },
     "yargs": {

@@ -1,6 +1,6 @@
 {
   "name": "cic-client-meta",
-  "version": "0.0.7-alpha.6",
+  "version": "0.0.7-alpha.8",
   "description": "Signed CRDT metadata graphs for the CIC network",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",

@@ -10,13 +10,15 @@
     "build-server": "tsc -d --outDir dist-server scripts/server/*.ts",
     "pack": "node_modules/typescript/bin/tsc -d --outDir dist && webpack",
     "clean": "rm -rf dist",
-    "prepare": "npm run build && npm run build-server"
+    "prepare": "npm run build && npm run build-server",
+    "start": "./node_modules/ts-node/dist/bin.js ./scripts/server/server.ts"
   },
   "dependencies": {
     "@ethereumjs/tx": "^3.0.0-beta.1",
     "automerge": "^0.14.1",
+    "crdt-meta": "0.0.8",
     "ethereumjs-wallet": "^1.0.1",
-    "ini": "^1.3.5",
+    "ini": "^1.3.8",
     "openpgp": "^4.10.8",
     "pg": "^8.4.2",
     "sqlite3": "^5.0.0",

@@ -40,6 +42,6 @@
   ],
   "license": "GPL-3.0-or-later",
   "engines": {
-    "node": "~14.16.1"
+    "node": ">=14.16.1"
   }
 }

@@ -1,4 +1,4 @@
-const config = require('./src/config');
+import { Config } from 'crdt-meta';
 const fs = require('fs');

 if (process.argv[2] === undefined) {

@@ -15,6 +15,6 @@ try {
 	process.exit(1);
 }

-const c = new config.Config(process.argv[2], process.env['CONFINI_ENV_PREFIX']);
+const c = new Config(process.argv[2], process.env['CONFINI_ENV_PREFIX']);
 c.process();
 process.stdout.write(c.toString());

@@ -1,15 +0,0 @@
-#!/bin/bash
-set -e
-
-psql -v ON_ERROR_STOP=1 --username grassroots --dbname cic_meta <<-EOSQL
-	create table if not exists store (
-		id serial primary key not null,
-		owner_fingerprint text not null,
-		hash char(64) not null unique,
-		content text not null
-	);
-
-	create index if not exists idx_fp on store ((lower(owner_fingerprint)));
-EOSQL
-
-

@@ -1,4 +1,4 @@
-create table if not exists cic_meta.store (
+create table if not exists store (
 	id serial primary key not null,
 	owner_fingerprint text not null,
 	hash char(64) not null unique,

@@ -1,8 +1,7 @@
 import * as Automerge from 'automerge';
 import * as pgp from 'openpgp';
-import * as pg from 'pg';

-import { Envelope, Syncable } from '../../src/sync';
+import { Envelope, Syncable } from 'crdt-meta';


 function handleNoMergeGet(db, digest, keystore) {

@@ -1,15 +1,11 @@
 import * as http from 'http';
 import * as fs from 'fs';
 import * as path from 'path';
-import * as pgp from 'openpgp';

 import * as handlers from './handlers';
-import { Envelope, Syncable } from '../../src/sync';
-import { PGPKeyStore, PGPSigner } from '../../src/auth';
+import { PGPKeyStore, PGPSigner, Config, SqliteAdapter, PostgresAdapter } from 'crdt-meta';

 import { standardArgs } from './args';
-import { Config } from '../../src/config';
-import { SqliteAdapter, PostgresAdapter } from '../../src/db';

 let configPath = '/usr/local/etc/cic-meta';

@@ -1,191 +0,0 @@
import * as pgp from 'openpgp';
import * as crypto from 'crypto';

interface Signable {
  digest():string;
}

type KeyGetter = () => any;

type Signature = {
  engine:string
  algo:string
  data:string
  digest:string
}

interface Signer {
  prepare(Signable):boolean;
  onsign(Signature):void;
  onverify(boolean):void;
  sign(digest:string):void
  verify(digest:string, signature:Signature):void
  fingerprint():string
}

interface Authoritative {
}

interface KeyStore {
  getPrivateKey: KeyGetter
  getFingerprint: () => string
  getTrustedKeys: () => Array<any>
  getTrustedActiveKeys: () => Array<any>
  getEncryptKeys: () => Array<any>
}

class PGPKeyStore implements KeyStore {

  fingerprint: string
  pk: any

  pubk = {
    active: [],
    trusted: [],
    encrypt: [],
  }
  loads = 0x00;
  loadsTarget = 0x0f;
  onload: (k:KeyStore) => void;

  constructor(passphrase:string, pkArmor:string, pubkActiveArmor:string, pubkTrustedArmor:string, pubkEncryptArmor:string, onload = (ks:KeyStore) => {}) {
    this._readKey(pkArmor, undefined, 1, passphrase);
    this._readKey(pubkActiveArmor, 'active', 2);
    this._readKey(pubkTrustedArmor, 'trusted', 4);
    this._readKey(pubkEncryptArmor, 'encrypt', 8);
    this.onload = onload;
  }

  private _readKey(a:string, x:any, n:number, pass?:string) {
    pgp.key.readArmored(a).then((k) => {
      if (pass !== undefined) {
        this.pk = k.keys[0];
        this.pk.decrypt(pass).then(() => {
          this.fingerprint = this.pk.getFingerprint();
          console.log('private key (sign)', this.fingerprint);
          this._registerLoad(n);
        });
      } else {
        this.pubk[x] = k.keys;
        k.keys.forEach((pubk) => {
          console.log('public key (' + x + ')', pubk.getFingerprint());
        });
        this._registerLoad(n);
      }
    });
  }

  private _registerLoad(b:number) {
    this.loads |= b;
    if (this.loads == this.loadsTarget) {
      this.onload(this);
    }
  }

  public getTrustedKeys(): Array<any> {
    return this.pubk['trusted'];
  }

  public getTrustedActiveKeys(): Array<any> {
    return this.pubk['active'];
  }

  public getEncryptKeys(): Array<any> {
    return this.pubk['encrypt'];
  }

  public getPrivateKey(): any {
    return this.pk;
  }

  public getFingerprint(): string {
    return this.fingerprint;
  }
}

class PGPSigner implements Signer {

  engine = 'pgp'
  algo = 'sha256'
  dgst: string
  signature: Signature
  keyStore: KeyStore
  onsign: (Signature) => void
  onverify: (boolean) => void

  constructor(keyStore:KeyStore) {
    this.keyStore = keyStore
    this.onsign = (string) => {};
    this.onverify = (boolean) => {};
  }

  public fingerprint(): string {
    return this.keyStore.getFingerprint();
  }

  public prepare(material:Signable):boolean {
    this.dgst = material.digest();
    return true;
  }

  public verify(digest:string, signature:Signature) {
    pgp.signature.readArmored(signature.data).then((s) => {
      const opts = {
        message: pgp.cleartext.fromText(digest),
        publicKeys: this.keyStore.getTrustedKeys(),
        signature: s,
      };
      pgp.verify(opts).then((v) => {
        let i = 0;
        for (i = 0; i < v.signatures.length; i++) {
          const s = v.signatures[i];
          if (s.valid) {
            this.onverify(s);
            return;
          }
        }
        console.error('checked ' + i + ' signature(s) but none valid');
        this.onverify(false);
      });
    }).catch((e) => {
      console.error(e);
      this.onverify(false);
    });
  }

  public sign(digest:string) {
    const m = pgp.cleartext.fromText(digest);
    const pk = this.keyStore.getPrivateKey();
    const opts = {
      message: m,
      privateKeys: [pk],
      detached: true,
    }
    pgp.sign(opts).then((s) => {
      this.signature = {
        engine: this.engine,
        algo: this.algo,
        data: s.signature,
        // TODO: fix for browser later
        digest: digest,
      };
      this.onsign(this.signature);
    }).catch((e) => {
      console.error(e);
      this.onsign(undefined);
    });
  }
}

export {
  Signature,
  Authoritative,
  Signer,
  KeyGetter,
  Signable,
  KeyStore,
  PGPSigner,
  PGPKeyStore,
};
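
The deleted PGPKeyStore gates its onload callback behind four asynchronous key loads, one bit per key file, firing only once loads reaches loadsTarget (0x0f). A standalone sketch of that bitmask pattern (the names here are illustrative, not part of the removed module):

class LoadGate {
  private loads = 0x00;

  constructor(private loadsTarget: number, private onload: () => void) {}

  public registerLoad(bit: number) {
    this.loads |= bit; // mark one async source as loaded
    if (this.loads == this.loadsTarget) {
      this.onload(); // fires exactly once, when all bits are set
    }
  }
}

const gate = new LoadGate(0x0f, () => console.log('all four keys loaded'));
[1, 2, 4, 8].forEach((bit) => gate.registerLoad(bit));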
@@ -1,71 +0,0 @@
import * as fs from 'fs';
import * as ini from 'ini';
import * as path from 'path';

class Config {

  filepath: string
  store: Object
  censor: Array<string>
  require: Array<string>
  env_prefix: string

  constructor(filepath:string, env_prefix?:string) {
    this.filepath = filepath;
    this.store = {};
    this.censor = [];
    this.require = [];
    this.env_prefix = '';
    if (env_prefix !== undefined) {
      this.env_prefix = env_prefix + "_";
    }
  }

  public process() {
    const d = fs.readdirSync(this.filepath);

    const r = /.*\.ini$/;
    for (let i = 0; i < d.length; i++) {
      const f = d[i];
      if (!f.match(r)) {
        return;
      }

      const fp = path.join(this.filepath, f);
      const v = fs.readFileSync(fp, 'utf-8');
      const inid = ini.decode(v);
      const inik = Object.keys(inid);
      for (let j = 0; j < inik.length; j++) {
        const k_section = inik[j]
        const k = k_section.toUpperCase();
        Object.keys(inid[k_section]).forEach((k_directive) => {
          const kk = k_directive.toUpperCase();
          const kkk = k + '_' + kk;

          let r = inid[k_section][k_directive];
          const k_env = this.env_prefix + kkk
          const env = process.env[k_env];
          if (env !== undefined) {
            console.debug('Environment variable ' + k_env + ' overrides ' + kkk);
            r = env;
          }
          this.store[kkk] = r;
        });
      }
    }
  }

  public get(s:string) {
    return this.store[s];
  }

  public toString() {
    let s = '';
    Object.keys(this.store).forEach((k) => {
      s += k + '=' + this.store[k] + '\n';
    });
    return s;
  }
}

export { Config };
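
The deleted Config flattens every [section] directive pair found in the *.ini files into a SECTION_DIRECTIVE key, and a PREFIX_SECTION_DIRECTIVE environment variable overrides the file value. The precedence rule in isolation (a hedged sketch; prefix and key are hypothetical):

function resolve(prefix: string, key: string, fileValue: string): string {
  const env = process.env[prefix + '_' + key];
  return env !== undefined ? env : fileValue; // environment wins over file
}

process.env['CICTEST_DATABASE_HOST'] = 'remotehost';
console.log(resolve('CICTEST', 'DATABASE_HOST', 'localhost')); // -> remotehost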
@@ -1,38 +0,0 @@
import { JSONSerializable } from './format';

const ENGINE_NAME = 'automerge';
const ENGINE_VERSION = '0.14.1';

const NETWORK_NAME = 'cic';
const NETWORK_VERSION = '1';

const CRYPTO_NAME = 'pgp';
const CRYPTO_VERSION = '2';

type VersionedSpec = {
  name: string
  version: string
  ext?: Object
}

const engineSpec:VersionedSpec = {
  name: ENGINE_NAME,
  version: ENGINE_VERSION,
}

const cryptoSpec:VersionedSpec = {
  name: CRYPTO_NAME,
  version: CRYPTO_VERSION,
}

const networkSpec:VersionedSpec = {
  name: NETWORK_NAME,
  version: NETWORK_VERSION,
}

export {
  engineSpec,
  cryptoSpec,
  networkSpec,
  VersionedSpec,
};
@@ -1,27 +0,0 @@
import * as crypto from 'crypto';

const _algs = {
  'SHA-256': 'sha256',
}

function cryptoWrapper() {
}

cryptoWrapper.prototype.digest = async function(s, d) {
  const h = crypto.createHash(_algs[s]);
  h.update(d);
  return h.digest();
}

let subtle = undefined;
if (typeof window !== 'undefined') {
  subtle = window.crypto.subtle;
} else {
  subtle = new cryptoWrapper();
}

export {
  subtle,
}
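
The deleted shim selects window.crypto.subtle in the browser and the thin Node wrapper otherwise, so callers can use one subtle.digest('SHA-256', data) call shape everywhere. A Node-side stand-in with the same shape (a sketch, not the published module):

import { createHash } from 'crypto';

const algs: Record<string, string> = { 'SHA-256': 'sha256' };
const subtle = {
  digest: async (alg: string, data: Buffer): Promise<Buffer> =>
    createHash(algs[alg]).update(data).digest(),
};

subtle.digest('SHA-256', Buffer.from('foo')).then((h) => console.log(h.toString('hex')));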
@@ -1,90 +0,0 @@
import * as pg from 'pg';
import * as sqlite from 'sqlite3';

type DbConfig = {
  name: string
  host: string
  port: number
  user: string
  password: string
}

interface DbAdapter {
  query: (s:string, callback:(e:any, rs:any) => void) => void
  close: () => void
}

const re_creatematch = /^(CREATE)/i
const re_getmatch = /^(SELECT)/i;
const re_setmatch = /^(INSERT|UPDATE)/i;

class SqliteAdapter implements DbAdapter {

  db: any

  constructor(dbConfig:DbConfig, callback?:(any) => void) {
    this.db = new sqlite.Database(dbConfig.name); //, callback);
  }

  public query(s:string, callback:(e:any, rs?:any) => void): void {
    const local_callback = (e, rs) => {
      let r = undefined;
      if (rs !== undefined) {
        r = {
          rowCount: rs.length,
          rows: rs,
        }
      }
      callback(e, r);
    };
    if (s.match(re_getmatch)) {
      this.db.all(s, local_callback);
    } else if (s.match(re_setmatch)) {
      this.db.run(s, local_callback);
    } else if (s.match(re_creatematch)) {
      this.db.run(s, callback);
    } else {
      throw 'unhandled query';
    }
  }

  public close() {
    this.db.close();
  }
}

class PostgresAdapter implements DbAdapter {

  db: any

  constructor(dbConfig:DbConfig) {
    let o = dbConfig;
    o['database'] = o.name;
    this.db = new pg.Pool(o);
    return this.db;
  }

  public query(s:string, callback:(e:any, rs:any) => void): void {
    this.db.query(s, (e, rs) => {
      let r = {
        length: rs.rowCount,
      }
      rs.length = rs.rowCount;
      if (e === undefined) {
        e = null;
      }
      console.debug(e, rs);
      callback(e, rs);
    });
  }

  public close() {
    this.db.end();
  }
}

export {
  DbConfig,
  SqliteAdapter,
  PostgresAdapter,
}
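
The deleted SqliteAdapter routes statements by regex so both adapters expose one query() signature: SELECT goes through db.all (rows expected back), INSERT/UPDATE through db.run. The dispatch rule in isolation, detached from sqlite itself:

const re_get = /^(SELECT)/i;
const re_set = /^(INSERT|UPDATE)/i;

function route(sql: string): 'all' | 'run' {
  if (sql.match(re_get)) return 'all'; // read path: rows expected
  if (sql.match(re_set)) return 'run'; // write path: no rows expected
  throw 'unhandled query';
}

console.log(route('SELECT * FROM store'));          // all
console.log(route("INSERT INTO store VALUES (1)")); // run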
@@ -1,68 +0,0 @@
import * as crypto from './crypto';

interface Addressable {
  key(): string
  digest(): string
}

function stringToBytes(s:string) {
  const a = new Uint8Array(20);
  let j = 2;
  for (let i = 0; i < a.byteLength; i++) {
    const n = parseInt(s.substring(j, j+2), 16);
    a[i] = n;
    j += 2;
  }
  return a;
}

function bytesToHex(a:Uint8Array) {
  let s = '';
  for (let i = 0; i < a.byteLength; i++) {
    const h = '00' + a[i].toString(16);
    s += h.slice(-2);
  }
  return s;
}

async function mergeKey(a:Uint8Array, s:Uint8Array) {
  const y = new Uint8Array(a.byteLength + s.byteLength);
  for (let i = 0; i < a.byteLength; i++) {
    y[i] = a[i];
  }
  for (let i = 0; i < s.byteLength; i++) {
    y[a.byteLength + i] = s[i];
  }
  const z = await crypto.subtle.digest('SHA-256', y);
  return bytesToHex(new Uint8Array(z));
}

async function toKey(v:string, salt:string) {
  const a = stringToBytes(v);
  const s = new TextEncoder().encode(salt);
  return await mergeKey(a, s);
}

async function toAddressKey(zeroExHex:string, salt:string) {
  const a = addressToBytes(zeroExHex);
  const s = new TextEncoder().encode(salt);
  return await mergeKey(a, s);
}

const re_addrHex = /^0[xX][a-fA-F0-9]{40}$/;
function addressToBytes(s:string) {
  if (!s.match(re_addrHex)) {
    throw 'invalid address hex';
  }
  return stringToBytes(s);
}

export {
  toKey,
  toAddressKey,
  mergeKey,
  bytesToHex,
  addressToBytes,
  Addressable,
}
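
The deleted toAddressKey derives a content key by hashing the 20 raw address bytes (0x prefix stripped) concatenated with the UTF-8 salt. An equivalent Node-only sketch using Buffer in place of the removed helpers:

import { createHash } from 'crypto';

function toAddressKeySketch(addressHex: string, salt: string): string {
  const address = Buffer.from(addressHex.slice(2), 'hex');    // 20 address bytes
  const salted = Buffer.concat([address, Buffer.from(salt)]); // address || salt
  return createHash('sha256').update(salted).digest('hex');
}

console.log(toAddressKeySketch('0x' + '00'.repeat(20), ':cic.person'));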
@@ -1,58 +0,0 @@
import { v4 as uuidv4 } from 'uuid';
import { Syncable } from './sync';
import { Store } from './store';
import { PubSub } from './transport';

function toIndexKey(id:string):string {
  const d = Date.now();
  return d + '_' + id + '_' + uuidv4();
}

const _re_indexKey = /^\d+_(.+)_[-\d\w]+$/;
function fromIndexKey(s:string):string {
  const m = s.match(_re_indexKey);
  if (m === null) {
    throw 'Invalid index key';
  }
  return m[1];
}

class Dispatcher {

  idx: Array<string>
  syncer: PubSub
  store: Store

  constructor(store:Store, syncer:PubSub) {
    this.idx = new Array<string>()
    this.syncer = syncer;
    this.store = store;
  }

  public isDirty(): boolean {
    return this.idx.length > 0;
  }

  public add(id:string, item:Syncable): string {
    const v = item.toJSON();
    const k = toIndexKey(id);
    this.store.put(k, v, true);
    localStorage.setItem(k, v);
    this.idx.push(k);
    return k;
  }

  public sync(offset:number): number {
    let i = 0;
    this.idx.forEach((k) => {
      const v = localStorage.getItem(k);
      const k_id = fromIndexKey(k);
      this.syncer.pub(v); // this must block until guaranteed delivery
      localStorage.removeItem(k);
      i++;
    });
    return i;
  }
}

export { Dispatcher, toIndexKey, fromIndexKey }
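
The deleted toIndexKey/fromIndexKey pair embeds the document id between a millisecond timestamp and a uuid, and recovers it by regex; the round trip is the invariant the dispatcher relies on. Shape sketch with a made-up uuid suffix:

const key = Date.now() + '_' + 'doc1' + '_' + 'a1b2c3d4'; // toIndexKey shape
const m = key.match(/^\d+_(.+)_[-\d\w]+$/);               // fromIndexKey regex
console.log(m && m[1]);                                   // -> 'doc1'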
@@ -1,5 +0,0 @@
interface JSONSerializable {
  toJSON(): string
}

export { JSONSerializable };
@@ -1,5 +1,2 @@
-export { PGPSigner, PGPKeyStore, Signer, KeyStore } from './auth';
-export { ArgPair, Envelope, Syncable } from './sync';
-export { User } from './assets/user';
-export { Phone } from './assets/phone';
-export { Config } from './config';
+export { User } from './user';
+export { Phone } from './phone';
@@ -1,5 +1,4 @@
-import { ArgPair, Syncable } from '../sync';
-import { Addressable, mergeKey } from '../digest';
+import { Syncable, Addressable, mergeKey } from 'crdt-meta';


class Phone extends Syncable implements Addressable {
@@ -1,9 +0,0 @@
import { Syncable } from './sync';

interface Store {
  put(string, Syncable, boolean?)
  get(string):Syncable
  delete(string)
}

export { Store };
@@ -1,266 +0,0 @@
import * as Automerge from 'automerge';

import { JSONSerializable } from './format';

import { Authoritative, Signer, PGPSigner, Signable, Signature } from './auth';

import { engineSpec, cryptoSpec, networkSpec, VersionedSpec } from './constants';

const fullSpec:VersionedSpec = {
  name: 'cic',
  version: '1',
  ext: {
    network: cryptoSpec,
    engine: engineSpec,
  },
}

class Envelope {

  o = fullSpec

  constructor(payload:Object) {
    this.set(payload);
  }

  public set(payload:Object) {
    this.o['payload'] = payload
  }

  public get():string {
    return this.o['payload'];
  }

  public toJSON() {
    return JSON.stringify(this.o);
  }

  public static fromJSON(s:string): Envelope {
    const e = new Envelope(undefined);
    e.o = JSON.parse(s);
    return e;
  }

  public unwrap(): Syncable {
    return Syncable.fromJSON(this.o['payload']);
  }
}

class ArgPair {

  k:string
  v:any

  constructor(k:string, v:any) {
    this.k = k;
    this.v = v;
  }
}

class SignablePart implements Signable {

  s: string

  constructor(s:string) {
    this.s = s;
  }

  public digest():string {
    return this.s;
  }
}

function orderDict(src) {
  let dst;
  if (Array.isArray(src)) {
    dst = [];
    src.forEach((v) => {
      if (typeof(v) == 'object') {
        v = orderDict(v);
      }
      dst.push(v);
    });
  } else {
    dst = {}
    Object.keys(src).sort().forEach((k) => {
      let v = src[k];
      if (typeof(v) == 'object') {
        v = orderDict(v);
      }
      dst[k] = v;
    });
  }
  return dst;
}

class Syncable implements JSONSerializable, Authoritative, Signable {

  id: string
  timestamp: number
  m: any // automerge object
  e: Envelope
  signer: Signer
  onwrap: (string) => void
  onauthenticate: (boolean) => void

  // TODO: Move data to sub-object so timestamp, id, signature don't collide
  constructor(id:string, v:Object) {
    this.id = id;
    const o = {
      'id': id,
      'timestamp': Math.floor(Date.now() / 1000),
      'data': v,
    }
    //this.m = Automerge.from(v)
    this.m = Automerge.from(o)
  }

  public setSigner(signer:Signer) {
    this.signer = signer;
    this.signer.onsign = (s) => {
      this.wrap(s);
    };
  }

  // TODO: To keep integrity, the non-link key/value pairs for each step also need to be hashed
  public digest(): string {
    const links = [];
    Automerge.getAllChanges(this.m).forEach((ch:Object) => {
      const op:Array<any> = ch['ops'];
      ch['ops'].forEach((op:Array<Object>) => {
        if (op['action'] == 'link') {
          //console.log('op link', op);
          links.push([op['obj'], op['value']]);
        }
      });
    });
    //return JSON.stringify(links);
    const j = JSON.stringify(links);
    return Buffer.from(j).toString('base64');
  }

  private wrap(s:any) {
    this.m = Automerge.change(this.m, 'sign', (doc) => {
      doc['signature'] = s;
    });
    this.e = new Envelope(this.toJSON());
    console.log('wrappin s', s, typeof(s));
    this.e.o['digest'] = s.digest;
    if (this.onwrap !== undefined) {
      this.onwrap(this.e);
    }
  }

  // private _verifyLoop(i:number, history:Array<any>, signable:Signable, result:boolean) {
  //   if (!result) {
  //     this.onauthenticate(false);
  //     return;
  //   } else if (history.length == 0) {
  //     this.onauthenticate(true);
  //     return;
  //   }
  //   const h = history.shift()
  //   if (i % 2 == 0) {
  //     i++;
  //     signable = {
  //       digest: () => {
  //         return Automerge.save(h.snapshot)
  //       },
  //     };
  //     this._verifyLoop(i, history, signable, true);
  //   } else {
  //     i++;
  //     const signature = h.snapshot['signature'];
  //     console.debug('signature', signature, signable.digest());
  //     this.signer.onverify = (v) => {
  //       this._verifyLoop(i, history, signable, v)
  //     }
  //     this.signer.verify(signable, signature);
  //   }
  // }
  //
  // // TODO: This should replay the graph and check signatures on each step
  // public _authenticate(full:boolean=false) {
  //   let h = Automerge.getHistory(this.m);
  //   h.forEach((m) => {
  //     //console.debug(m.snapshot);
  //   });
  //   const signable = {
  //     digest: () => { return '' },
  //   }
  //   if (!full) {
  //     h = h.slice(h.length-2);
  //   }
  //   this._verifyLoop(0, h, signable, true);
  // }

  public authenticate(full:boolean=false) {
    if (full) {
      console.warn('only doing shallow authentication for now, sorry');
    }
    //console.log('authenticating', signable.digest());
    //console.log('signature', this.m.signature);
    this.signer.onverify = (v) => {
      //this._verifyLoop(i, history, signable, v)
      this.onauthenticate(v);
    }
    this.signer.verify(this.m.signature.digest, this.m.signature);
  }

  public sign() {
    //this.signer.prepare(this);
    this.signer.sign(this.digest());
  }

  public update(changes:Array<ArgPair>, changesDescription:string) {
    this.m = Automerge.change(this.m, changesDescription, (m) => {
      changes.forEach((c) => {
        let path = c.k.split('.');
        let target = m['data'];
        while (path.length > 1) {
          const part = path.shift();
          target = target[part];
        }
        target[path[0]] = c.v;
      });
      m['timestamp'] = Math.floor(Date.now() / 1000);
    });
  }

  public replace(o:Object, changesDescription:string) {
    this.m = Automerge.change(this.m, changesDescription, (m) => {
      Object.keys(o).forEach((k) => {
        m['data'][k] = o[k];
      });
      Object.keys(m).forEach((k) => {
        if (o[k] == undefined) {
          delete m['data'][k];
        }
      });
      m['timestamp'] = Math.floor(Date.now() / 1000);
    });
  }

  public merge(s:Syncable) {
    this.m = Automerge.merge(s.m, this.m);
  }

  public toJSON(): string {
    const s = Automerge.save(this.m);
    const o = JSON.parse(s);
    const oo = orderDict(o)
    return JSON.stringify(oo);
  }

  public static fromJSON(s:string): Syncable {
    const doc = Automerge.load(s);
    let y = new Syncable(doc['id'], {});
    y.m = doc
    return y
  }
}

export { JSONSerializable, Syncable, ArgPair, Envelope };
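
The deleted Syncable.toJSON() runs the Automerge save output through orderDict, recursively sorting object keys so that equal documents serialize to byte-identical JSON for signing. The sorting step in isolation (a sketch of the idea; the original handles nested arrays the same way):

function orderDictSketch(src: any): any {
  if (Array.isArray(src)) {
    return src.map((v) => (typeof v === 'object' ? orderDictSketch(v) : v));
  }
  const dst: any = {};
  Object.keys(src).sort().forEach((k) => {
    dst[k] = typeof src[k] === 'object' ? orderDictSketch(src[k]) : src[k];
  });
  return dst;
}

console.log(JSON.stringify(orderDictSketch({ b: 1, a: { d: 2, c: 3 } })));
// -> {"a":{"c":3,"d":2},"b":1}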
@@ -1,11 +0,0 @@
interface SubConsumer {
  post(string)
}

interface PubSub {
  pub(v:string):boolean
  close()
}

export { PubSub, SubConsumer };
@@ -1,5 +1,4 @@
-import { ArgPair, Syncable } from '../sync';
-import { Addressable, addressToBytes, bytesToHex, toAddressKey } from '../digest';
+import { Syncable, Addressable, toAddressKey } from 'crdt-meta';

const keySalt = new TextEncoder().encode(':cic.person');
class User extends Syncable implements Addressable {
@@ -1,50 +0,0 @@
import * as Automerge from 'automerge';
import assert = require('assert');

import { Dispatcher, toIndexKey, fromIndexKey } from '../src/dispatch';
import { User } from '../src/assets/user';
import { Syncable, ArgPair } from '../src/sync';

import { MockSigner, MockStore } from './mock';

describe('basic', () => {

  it('store', () => {
    const store = new MockStore('s');
    assert.equal(store.name, 's');

    const mockSigner = new MockSigner();
    const v = new Syncable('foo', {baz: 42});
    v.setSigner(mockSigner);
    store.put('foo', v);
    const one = store.get('foo').toJSON();
    const vv = new Syncable('bar', {baz: 666});
    vv.setSigner(mockSigner);
    assert.throws(() => {
      store.put('foo', vv)
    });
    store.put('foo', vv, true);
    const other = store.get('foo').toJSON();
    assert.notEqual(one, other);
    store.delete('foo');
    assert.equal(store.get('foo'), undefined);
  });

  it('add_doc_to_dispatcher', () => {
    const store = new MockStore('s');
    //const syncer = new MockSyncer();
    const dispatcher = new Dispatcher(store, undefined);
    const user = new User('foo');
    dispatcher.add(user.id, user);
    assert(dispatcher.isDirty());
  });

  it('dispatch_keyindex', () => {
    const s = 'foo';
    const k = toIndexKey(s);
    const v = fromIndexKey(k);
    assert.equal(s, v);
  });

});
@@ -1,212 +0,0 @@
import * as Automerge from 'automerge';
import assert = require('assert');

import * as pgp from 'openpgp';
import * as fs from 'fs';

import { PGPSigner } from '../src/auth';

import { Syncable, ArgPair } from '../src/sync';

import { MockKeyStore, MockSigner } from './mock';


describe('sync', async () => {

  it('sync_merge', () => {
    const mockSigner = new MockSigner();
    const s = new Syncable('foo', {
      bar: 'baz',
    });
    s.setSigner(mockSigner);
    const changePair = new ArgPair('xyzzy', 42);
    s.update([changePair], 'ch-ch-cha-changes');
    assert.equal(s.m.data['xyzzy'], 42)
    assert.equal(s.m.data['bar'], 'baz')
    assert.equal(s.m['id'], 'foo')
    assert.equal(Automerge.getHistory(s.m).length, 2);
  });

  it('sync_serialize', () => {
    const mockSigner = new MockSigner();
    const s = new Syncable('foo', {
      bar: 'baz',
    });
    s.setSigner(mockSigner);
    const j = s.toJSON();
    const ss = Syncable.fromJSON(j);
    assert.equal(ss.m['id'], 'foo');
    assert.equal(ss.m['data']['bar'], 'baz');
    assert.equal(Automerge.getHistory(ss.m).length, 1);
  });

  it('sync_sign_and_wrap', () => {
    const mockSigner = new MockSigner();
    const s = new Syncable('foo', {
      bar: 'baz',
    });
    s.setSigner(mockSigner);
    s.onwrap = (e) => {
      const j = e.toJSON();
      const v = JSON.parse(j);
      assert.deepEqual(v.payload, e.o.payload);
    }
    s.sign();
  });

  it('sync_verify_success', async () => {
    const pksa = fs.readFileSync(__dirname + '/privatekeys.asc');
    const pks = await pgp.key.readArmored(pksa);
    await pks.keys[0].decrypt('merman');
    await pks.keys[1].decrypt('beastman');

    const pubksa = fs.readFileSync(__dirname + '/publickeys.asc');
    const pubks = await pgp.key.readArmored(pubksa);

    const oneStore = new MockKeyStore(pks.keys[0], pubks.keys);
    const twoStore = new MockKeyStore(pks.keys[1], pubks.keys);
    const threeStore = new MockKeyStore(pks.keys[2], [pubks.keys[0], pubks.keys[2]]);

    const oneSigner = new PGPSigner(oneStore);
    const twoSigner = new PGPSigner(twoStore);
    const threeSigner = new PGPSigner(threeStore);

    const x = new Syncable('foo', {
      bar: 'baz',
    });
    x.setSigner(oneSigner);

    // TODO: make this look better
    x.onwrap = (e) => {
      let updateData = new ArgPair('bar', 'xyzzy');
      x.update([updateData], 'change one');

      x.onwrap = (e) => {
        x.setSigner(twoSigner);
        updateData = new ArgPair('bar', 42);
        x.update([updateData], 'change two');

        x.onwrap = (e) => {
          const p = e.unwrap();
          p.setSigner(twoSigner);
          p.onauthenticate = (v) => {
            assert(v);
          }
          p.authenticate();
        }

        x.sign();
      };

      x.sign();
    }

    x.sign();

  });

  it('sync_verify_fail', async () => {
    const pksa = fs.readFileSync(__dirname + '/privatekeys.asc');
    const pks = await pgp.key.readArmored(pksa);
    await pks.keys[0].decrypt('merman');
    await pks.keys[1].decrypt('beastman');

    const pubksa = fs.readFileSync(__dirname + '/publickeys.asc');
    const pubks = await pgp.key.readArmored(pubksa);

    const oneStore = new MockKeyStore(pks.keys[0], pubks.keys);
    const twoStore = new MockKeyStore(pks.keys[1], pubks.keys);
    const threeStore = new MockKeyStore(pks.keys[2], [pubks.keys[0], pubks.keys[2]]);

    const oneSigner = new PGPSigner(oneStore);
    const twoSigner = new PGPSigner(twoStore);
    const threeSigner = new PGPSigner(threeStore);

    const x = new Syncable('foo', {
      bar: 'baz',
    });
    x.setSigner(oneSigner);

    // TODO: make this look better
    x.onwrap = (e) => {
      let updateData = new ArgPair('bar', 'xyzzy');
      x.update([updateData], 'change one');

      x.onwrap = (e) => {
        x.setSigner(twoSigner);
        updateData = new ArgPair('bar', 42);
        x.update([updateData], 'change two');

        x.onwrap = (e) => {
          const p = e.unwrap();
          p.setSigner(threeSigner);
          p.onauthenticate = (v) => {
            assert(!v);
          }
          p.authenticate();
        }

        x.sign();
      };

      x.sign();
    }

    x.sign();

  });

  xit('sync_verify_shallow_tricked', async () => {
    const pksa = fs.readFileSync(__dirname + '/privatekeys.asc');
    const pks = await pgp.key.readArmored(pksa);
    await pks.keys[0].decrypt('merman');
    await pks.keys[1].decrypt('beastman');

    const pubksa = fs.readFileSync(__dirname + '/publickeys.asc');
    const pubks = await pgp.key.readArmored(pubksa);

    const oneStore = new MockKeyStore(pks.keys[0], pubks.keys);
    const twoStore = new MockKeyStore(pks.keys[1], pubks.keys);
    const threeStore = new MockKeyStore(pks.keys[2], [pubks.keys[0], pubks.keys[2]]);

    const oneSigner = new PGPSigner(oneStore);
    const twoSigner = new PGPSigner(twoStore);
    const threeSigner = new PGPSigner(threeStore);

    const x = new Syncable('foo', {
      bar: 'baz',
    });
    x.setSigner(twoSigner);

    // TODO: make this look better
    x.onwrap = (e) => {
      let updateData = new ArgPair('bar', 'xyzzy');
      x.update([updateData], 'change one');

      x.onwrap = (e) => {
        updateData = new ArgPair('bar', 42);
        x.update([updateData], 'change two');
        x.setSigner(oneSigner);

        x.onwrap = (e) => {
          const p = e.unwrap();
          p.setSigner(threeSigner);
          p.onauthenticate = (v) => {
            assert(v);
            p.onauthenticate = (v) => {
              assert(!v);
            }
            p.authenticate(true);
          }
          p.authenticate();
        }

        x.sign();
      };

      x.sign();
    }

    x.sign();

  });
});
@@ -1,14 +0,0 @@
import * as assert from 'assert';

import { MockPubSub, MockConsumer } from './mock';

describe('transport', () => {
  it('pub_sub', () => {
    const c = new MockConsumer();
    const ps = new MockPubSub('foo', c);
    ps.pub('foo');
    ps.pub('bar');
    ps.flush();
    assert.deepEqual(c.omnoms, ['foo', 'bar']);
  });
});
@@ -1,46 +0,0 @@
import assert = require('assert');
import pgp = require('openpgp');
import crypto = require('crypto');

import { Syncable, ArgPair } from '../src/sync';

import { MockKeyStore, MockSignable } from './mock';

import { PGPSigner } from '../src/auth';


describe('auth', async () => {
  await it('digest', async () => {
    const opts = {
      userIds: [
        {
          name: 'John Marston',
          email: 'red@dead.com',
        },
      ],
      numBits: 2048,
      passphrase: 'foo',
    };
    const pkgen = await pgp.generateKey(opts);
    const pka = pkgen.privateKeyArmored;
    const pks = await pgp.key.readArmored(pka);
    await pks.keys[0].decrypt('foo');
    const pubka = pkgen.publicKeyArmored;
    const pubks = await pgp.key.readArmored(pubka);
    const keyStore = new MockKeyStore(pks.keys[0], pubks.keys);
    const s = new PGPSigner(keyStore);

    const message = await pgp.cleartext.fromText('foo');
    s.onverify = (ok) => {
      assert(ok);
    }
    s.onsign = (signature) => {
      s.onverify((v) => {
        console.log('bar', v);
      });
      s.verify('foo', signature);
    }

    await s.sign('foo');
  });
});
@@ -1,47 +0,0 @@
import * as assert from 'assert';
import * as pgp from 'openpgp';

import { Dispatcher } from '../src/dispatch';
import { User } from '../src/assets/user';
import { PGPSigner, KeyStore } from '../src/auth';
import { SubConsumer } from '../src/transport';

import { MockStore, MockPubSub, MockConsumer, MockKeyStore } from './mock';

async function createKeyStore() {
  const opts = {
    userIds: [
      {
        name: 'John Marston',
        email: 'red@dead.com',
      },
    ],
    numBits: 2048,
    passphrase: 'foo',
  };
  const pkgen = await pgp.generateKey(opts);
  const pka = pkgen.privateKeyArmored;
  const pks = await pgp.key.readArmored(pka);
  await pks.keys[0].decrypt('foo');
  return new MockKeyStore(pks.keys[0], []);
}

describe('fullchain', async () => {
  it('dispatch_and_publish_user', async () => {
    const g = await createKeyStore();
    const n = new PGPSigner(g);
    const u = new User('u1', {});
    u.setSigner(n);
    u.setName('Nico', 'Bellic');
    const s = new MockStore('fooStore');
    const c = new MockConsumer();
    const p = new MockPubSub('fooPubSub', c);
    const d = new Dispatcher(s, p);
    u.onwrap = (e) => {
      d.add(u.id, e);
      d.sync(0);
      assert.equal(p.pubs.length, 1);
    };
    u.sign();
  });
});
@@ -1,150 +0,0 @@
import * as crypto from 'crypto';

import { Signable, Signature, KeyStore } from '../src/auth';
import { Store } from '../src/store';
import { PubSub, SubConsumer } from '../src/transport';
import { Syncable } from '../src/sync';

class MockStore implements Store {

  contents: Object
  name: string

  constructor(name:string) {
    this.name = name;
    this.contents = {};
  }

  public put(k:string, v:Syncable, existsOk = false) {
    if (!existsOk && this.contents[k] !== undefined) {
      throw '"' + k + '" already exists in store ' + this.name;
    }
    this.contents[k] = v;
  }

  public get(k:string): Syncable {
    return this.contents[k];
  }

  public delete(k:string) {
    delete this.contents[k];
  }
}

class MockSigner {
  onsign: (string) => void
  onverify: (boolean) => void

  public verify(src:string, signature:Signature) {
    return true;
  }

  public sign(s:string):boolean {
    this.onsign('there would be a signature here');
    return true;
  }

  public prepare(m:Signable):boolean {
    return true;
  }

  public fingerprint():string {
    return '';
  }
}

class MockConsumer implements SubConsumer {

  omnoms: Array<string>

  constructor() {
    this.omnoms = Array<string>();
  }

  public post(v:string) {
    this.omnoms.push(v);
  }
}

class MockPubSub implements PubSub {

  pubs: Array<string>
  consumer: SubConsumer

  constructor(name:string, consumer:SubConsumer) {
    this.pubs = Array<string>();
    this.consumer = consumer;
  }

  public pub(v:string): boolean {
    this.pubs.push(v);
    return true;
  }

  public flush() {
    while (this.pubs.length > 0) {
      const s = this.pubs.shift();
      this.consumer.post(s);
    }
  }

  public close() {
  }
}

class MockSignable implements Signable {

  src: string
  dst: string

  constructor(src:string) {
    this.src = src;
  }

  public digest():string {
    const h = crypto.createHash('sha256');
    h.update(this.src);
    this.dst = h.digest('hex');
    return this.dst;
  }
}

class MockKeyStore implements KeyStore {

  pk: any
  pubks: Array<any>

  constructor(pk:any, pubks:Array<any>) {
    this.pk = pk;
    this.pubks = pubks;
  }

  public getPrivateKey(): any {
    return this.pk;
  }

  public getTrustedKeys(): Array<any> {
    return this.pubks;
  }

  public getTrustedActiveKeys(): Array<any> {
    return [];
  }

  public getEncryptKeys(): Array<any> {
    return [];
  }

  public getFingerprint(): string {
    return '';
  }
}

export {
  MockStore,
  MockPubSub,
  MockConsumer,
  MockSignable,
  MockKeyStore,
  MockSigner,
};
@@ -1,13 +1,10 @@
-import Automerge = require('automerge');
import assert = require('assert');
import fs = require('fs');
import pgp = require('openpgp');
import sqlite = require('sqlite3');

import * as handlers from '../scripts/server/handlers';
-import { Envelope, Syncable, ArgPair } from '../src/sync';
-import { PGPKeyStore, PGPSigner, KeyStore, Signer } from '../src/auth';
-import { SqliteAdapter } from '../src/db';
+import { Envelope, Syncable, ArgPair, PGPKeyStore, PGPSigner, KeyStore, Signer, SqliteAdapter } from 'crdt-meta';

function createKeystore() {
  const pksa = fs.readFileSync(__dirname + '/privatekeys.asc', 'utf-8');
@@ -20,7 +20,7 @@ def define_account_tx_metadata(user: Account):
    )
    key = generate_metadata_pointer(
        identifier=identifier,
-        cic_type='cic.person'
+        cic_type=':cic.person'
    )
    account_metadata = get_cached_data(key=key)
@@ -80,7 +80,7 @@ def get_cached_operational_balance(blockchain_address: str):
    """
    key = create_cached_data_key(
        identifier=bytes.fromhex(blockchain_address[2:]),
-        salt='cic.balances_data'
+        salt=':cic.balances_data'
    )
    cached_balance = get_cached_data(key=key)
    if cached_balance:
@@ -38,3 +38,13 @@ class MetadataStoreError(Exception):
    pass


+class SeppukuError(Exception):
+    """Exception base class for all errors that should cause system shutdown"""
+    pass
+
+
+class InitializationError(Exception):
+    """Exception raised when initialization state is insufficient to run component"""
+    pass
@@ -118,7 +118,7 @@ class MetadataRequestsHandler(Metadata):
        metadata_http_error_handler(result=result)
        response_data = result.content
        data = json.loads(response_data.decode('utf-8'))
-        if result.status_code == 200 and self.cic_type == 'cic.person':
+        if result.status_code == 200 and self.cic_type == ':cic.person':
            person = Person()
            deserialized_person = person.deserialize(person_data=json.loads(data))
            data = json.dumps(deserialized_person.serialize())
@@ -9,4 +9,4 @@ from .base import MetadataRequestsHandler
class PersonMetadata(MetadataRequestsHandler):

    def __init__(self, identifier: bytes):
-        super().__init__(cic_type='cic.person', identifier=identifier)
+        super().__init__(cic_type=':cic.person', identifier=identifier)
@@ -10,4 +10,4 @@ from .base import MetadataRequestsHandler
class PhonePointerMetadata(MetadataRequestsHandler):

    def __init__(self, identifier: bytes):
-        super().__init__(cic_type='cic.msisdn', identifier=identifier)
+        super().__init__(cic_type=':cic.phone', identifier=identifier)
@@ -48,10 +48,9 @@ def define_response_with_content(headers: list, response: str) -> tuple:
    content_length_header = ('Content-Length', str(content_length))
    # check for content length defaulted to zero in error headers
    for position, header in enumerate(headers):
-        if header[0] == 'Content-Length':
-            headers[position] = content_length_header
-        else:
-            headers.append(content_length_header)
+        if 'Content-Length' in header:
+            headers.pop(position)
+    headers.append(content_length_header)
    return response_bytes, headers
@@ -7,6 +7,7 @@ from typing import Optional
# third party imports
import celery
from sqlalchemy import desc
+from cic_eth.api import Api
from tinydb.table import Document

# local imports
@@ -15,7 +16,7 @@ from cic_ussd.balance import BalanceManager, compute_operational_balance, get_ca
from cic_ussd.chain import Chain
from cic_ussd.db.models.account import AccountStatus, Account
from cic_ussd.db.models.ussd_session import UssdSession
-from cic_ussd.error import MetadataNotFoundError
+from cic_ussd.error import MetadataNotFoundError, SeppukuError
from cic_ussd.menu.ussd_menu import UssdMenu
from cic_ussd.metadata import blockchain_address_to_metadata_pointer
from cic_ussd.phone_number import get_user_by_phone_number
@@ -28,6 +29,38 @@ from cic_types.models.person import generate_metadata_pointer, get_contact_data_
logg = logging.getLogger(__name__)


+def get_default_token_data():
+    chain_str = Chain.spec.__str__()
+    cic_eth_api = Api(chain_str=chain_str)
+    default_token_request_task = cic_eth_api.default_token()
+    default_token_data = default_token_request_task.get()
+    return default_token_data
+
+
+def retrieve_token_symbol(chain_str: str = Chain.spec.__str__()):
+    """
+    :param chain_str:
+    :type chain_str:
+    :return:
+    :rtype:
+    """
+    cache_key = create_cached_data_key(
+        identifier=chain_str.encode('utf-8'),
+        salt=':cic.default_token_data'
+    )
+    cached_data = get_cached_data(key=cache_key)
+    if cached_data:
+        default_token_data = json.loads(cached_data)
+        return default_token_data.get('symbol')
+    else:
+        logg.warning('Cached default token data not found. Attempting retrieval from default token API')
+        default_token_data = get_default_token_data()
+        if default_token_data:
+            return default_token_data.get('symbol')
+        else:
+            raise SeppukuError(f'Could not retrieve default token for: {chain_str}')
+
+
def process_pin_authorization(display_key: str, user: Account, **kwargs) -> str:
    """
    This method provides translation for all ussd menu entries that follow the pin authorization pattern.
@@ -73,7 +106,9 @@ def process_exit_insufficient_balance(display_key: str, user: Account, ussd_sess
    # compile response data
    user_input = ussd_session.get('user_input').split('*')[-1]
    transaction_amount = to_wei(value=int(user_input))
-    token_symbol = 'SRF'
+
+    # get default data
+    token_symbol = retrieve_token_symbol()
+
    recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number')
    recipient = get_user_by_phone_number(phone_number=recipient_phone_number)
@@ -102,7 +137,7 @@ def process_exit_successful_transaction(display_key: str, user: Account, ussd_se
    :rtype: str
    """
    transaction_amount = to_wei(int(ussd_session.get('session_data').get('transaction_amount')))
-    token_symbol = 'SRF'
+    token_symbol = retrieve_token_symbol()
    recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number')
    recipient = get_user_by_phone_number(phone_number=recipient_phone_number)
    tx_recipient_information = define_account_tx_metadata(user=recipient)
@@ -137,7 +172,7 @@ def process_transaction_pin_authorization(user: Account, display_key: str, ussd_
    tx_recipient_information = define_account_tx_metadata(user=recipient)
    tx_sender_information = define_account_tx_metadata(user=user)

-    token_symbol = 'SRF'
+    token_symbol = retrieve_token_symbol()
    user_input = ussd_session.get('session_data').get('transaction_amount')
    transaction_amount = to_wei(value=int(user_input))
    logg.debug('Requires integration to determine user tokens.')
@@ -168,18 +203,18 @@ def process_account_balances(user: Account, display_key: str, ussd_session: dict
    logg.debug('Requires call to retrieve tax and bonus amounts')
    tax = ''
    bonus = ''
+    token_symbol = retrieve_token_symbol()
    return translation_for(
        key=display_key,
        preferred_language=user.preferred_language,
        operational_balance=operational_balance,
        tax=tax,
        bonus=bonus,
-        token_symbol='SRF'
+        token_symbol=token_symbol
    )


-def format_transactions(transactions: list, preferred_language: str):
+def format_transactions(transactions: list, preferred_language: str, token_symbol: str):

    formatted_transactions = ''
    if len(transactions) > 0:
@@ -190,7 +225,7 @@ def format_transactions(transactions: list, preferred_language: str):
            timestamp = transaction.get('timestamp')
            action_tag = transaction.get('action_tag')
            direction = transaction.get('direction')
-            token_symbol = 'SRF'
+            token_symbol = token_symbol

            if action_tag == 'SENT' or action_tag == 'ULITUMA':
                formatted_transactions += f'{action_tag} {value} {token_symbol} {direction} {recipient_phone_number} {timestamp}.\n'
@@ -214,7 +249,7 @@ def process_display_user_metadata(user: Account, display_key: str):
    """
    key = generate_metadata_pointer(
        identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
    )
    user_metadata = get_cached_data(key)
    if user_metadata:
@@ -251,9 +286,11 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
    """
    # retrieve cached statement
    identifier = blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address)
-    key = create_cached_data_key(identifier=identifier, salt='cic.statement')
+    key = create_cached_data_key(identifier=identifier, salt=':cic.statement')
    transactions = get_cached_data(key=key)

+    token_symbol = retrieve_token_symbol()
+
    first_transaction_set = []
    middle_transaction_set = []
    last_transaction_set = []
@@ -277,7 +314,8 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
            preferred_language=user.preferred_language,
            first_transaction_set=format_transactions(
                transactions=first_transaction_set,
-                preferred_language=user.preferred_language
+                preferred_language=user.preferred_language,
+                token_symbol=token_symbol
            )
        )
    elif display_key == 'ussd.kenya.middle_transaction_set':
@@ -286,7 +324,8 @@
            preferred_language=user.preferred_language,
            middle_transaction_set=format_transactions(
                transactions=middle_transaction_set,
-                preferred_language=user.preferred_language
+                preferred_language=user.preferred_language,
+                token_symbol=token_symbol
            )
        )
@@ -296,7 +335,8 @@
            preferred_language=user.preferred_language,
            last_transaction_set=format_transactions(
                transactions=last_transaction_set,
-                preferred_language=user.preferred_language
+                preferred_language=user.preferred_language,
+                token_symbol=token_symbol
            )
        )
@@ -312,18 +352,19 @@ def process_start_menu(display_key: str, user: Account):
    :return: Corresponding translation text response
    :rtype: str
    """
+    token_symbol = retrieve_token_symbol()
    chain_str = Chain.spec.__str__()
    blockchain_address = user.blockchain_address
    balance_manager = BalanceManager(address=blockchain_address,
                                     chain_str=chain_str,
-                                     token_symbol='SRF')
+                                     token_symbol=token_symbol)

    # get balances synchronously for display on start menu
    balances_data = balance_manager.get_balances()

    key = create_cached_data_key(
        identifier=bytes.fromhex(blockchain_address[2:]),
-        salt='cic.balances_data'
+        salt=':cic.balances_data'
    )
    cache_data(key=key, data=json.dumps(balances_data))
@@ -340,9 +381,6 @@ def process_start_menu(display_key: str, user: Account):
    # retrieve and cache account's statement
    retrieve_account_statement(blockchain_address=blockchain_address)
 
-    # TODO [Philip]: figure out how to get token symbol from a metadata layer of sorts.
-    token_symbol = 'SRF'
-
    return translation_for(
        key=display_key,
        preferred_language=user.preferred_language,
@@ -375,6 +413,13 @@ def process_request(user_input: str, user: Account, ussd_session: Optional[dict]
    :return: A ussd menu's corresponding text value.
    :rtype: Document
    """
+    # retrieve metadata before any transition
+    key = generate_metadata_pointer(
+        identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
+        cic_type=':cic.person'
+    )
+    person_metadata = get_cached_data(key=key)
+
    if ussd_session:
        if user_input == "0":
            return UssdMenu.parent_menu(menu_name=ussd_session.get('state'))
@@ -385,12 +430,6 @@ def process_request(user_input: str, user: Account, ussd_session: Optional[dict]
        if user.has_valid_pin():
            last_ussd_session = retrieve_most_recent_ussd_session(phone_number=user.phone_number)
 
-            key = generate_metadata_pointer(
-                identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-                cic_type='cic.person'
-            )
-            person_metadata = get_cached_data(key=key)
-
            if last_ussd_session:
                # get last state
                last_state = last_ussd_session.state
apps/cic-ussd/cic_ussd/runnable/daemons/cic_user_server.py (new file, 73 lines)
@@ -0,0 +1,73 @@
+"""
+This module handles requests originating from CICADA or any other management client for custodial wallets, processing
+requests offering control of user account states to a staff behind the client.
+"""
+
+# standard imports
+import logging
+from urllib.parse import quote_plus
+
+# third-party imports
+from confini import Config
+
+# local imports
+from cic_ussd.db import dsn_from_config
+from cic_ussd.db.models.base import SessionBase
+from cic_ussd.operations import define_response_with_content
+from cic_ussd.requests import (get_request_endpoint,
+                               get_query_parameters,
+                               process_pin_reset_requests,
+                               process_locked_accounts_requests)
+from cic_ussd.runnable.server_base import exportable_parser, logg
+args = exportable_parser.parse_args()
+
+# define log levels
+if args.vv:
+    logging.getLogger().setLevel(logging.DEBUG)
+elif args.v:
+    logging.getLogger().setLevel(logging.INFO)
+
+# parse config
+config = Config(config_dir=args.c, env_prefix=args.env_prefix)
+config.process()
+config.censor('PASSWORD', 'DATABASE')
+logg.debug('config loaded from {}:\n{}'.format(args.c, config))
+
+# set up db
+data_source_name = dsn_from_config(config)
+SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
+# create session for the life time of http request
+SessionBase.session = SessionBase.create_session()
+
+
+# handle requests from CICADA
+def application(env, start_response):
+    """Loads python code for application to be accessible over web server
+    :param env: Object containing server and request information
+    :type env: dict
+    :param start_response: Callable to define responses.
+    :type start_response: any
+    :return: a list containing a bytes representation of the response object
+    :rtype: list
+    """
+    # define headers
+    errors_headers = [('Content-Type', 'text/plain'), ('Content-Length', '0')]
+    headers = [('Content-Type', 'text/plain')]
+
+    if get_request_endpoint(env) == '/pin':
+        phone_number = get_query_parameters(env=env, query_name='phoneNumber')
+        phone_number = quote_plus(phone_number)
+        response, message = process_pin_reset_requests(env=env, phone_number=phone_number)
+        response_bytes, headers = define_response_with_content(headers=errors_headers, response=response)
+        SessionBase.session.close()
+        start_response(message, headers)
+        return [response_bytes]
+
+    # handle requests for locked accounts
+    response, message = process_locked_accounts_requests(env=env)
+    response_bytes, headers = define_response_with_content(headers=headers, response=response)
+    start_response(message, headers)
+    SessionBase.session.close()
+    return [response_bytes]
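For quick local inspection, the new module's application callable can also be served with the standard library instead of uwsgi; a sketch, assuming the config directory and database set up at import time are reachable:

# sketch only; mirrors what start_cic_user_server.sh later does with uwsgi
from wsgiref.simple_server import make_server

from cic_ussd.runnable.daemons.cic_user_server import application

with make_server('', 9500, application) as httpd:  # 9500 matches the SERVER_PORT default below
    httpd.serve_forever()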
@@ -1,25 +1,23 @@
-"""Functions defining WSGI interaction with external http requests
-Defines an application function essential for the uWSGI python loader to run th python application code.
+"""This module handles requests originating from the ussd service provider.
 """
 
 # standard imports
-import argparse
-import celery
-import i18n
 import json
 import logging
-import os
-import redis
 
 # third-party imports
-from confini import Config
+import celery
+import i18n
+import redis
 from chainlib.chain import ChainSpec
-from urllib.parse import quote_plus
+from confini import Config
 
 # local imports
 from cic_ussd.chain import Chain
 from cic_ussd.db import dsn_from_config
 from cic_ussd.db.models.base import SessionBase
 from cic_ussd.encoder import PasswordEncoder
+from cic_ussd.error import InitializationError
 from cic_ussd.files.local_files import create_local_file_data_stores, json_file_parser
 from cic_ussd.menu.ussd_menu import UssdMenu
 from cic_ussd.metadata.signer import Signer
@@ -28,34 +26,17 @@ from cic_ussd.operations import (define_response_with_content,
                                 process_menu_interaction_requests,
                                 define_multilingual_responses)
 from cic_ussd.phone_number import process_phone_number
-from cic_ussd.redis import InMemoryStore
+from cic_ussd.processor import get_default_token_data
+from cic_ussd.redis import cache_data, create_cached_data_key, InMemoryStore
 from cic_ussd.requests import (get_request_endpoint,
-                               get_request_method,
-                               get_query_parameters,
-                               process_locked_accounts_requests,
-                               process_pin_reset_requests)
+                               get_request_method)
+from cic_ussd.runnable.server_base import exportable_parser, logg
 from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
 from cic_ussd.state_machine import UssdStateMachine
 from cic_ussd.validator import check_ip, check_request_content_length, check_service_code, validate_phone_number, \
    validate_presence
 
-logging.basicConfig(level=logging.WARNING)
-logg = logging.getLogger()
-
-config_directory = '/usr/local/etc/cic-ussd/'
-
-# define arguments
-arg_parser = argparse.ArgumentParser()
-arg_parser.add_argument('-c', type=str, default=config_directory, help='config directory.')
-arg_parser.add_argument('-q', type=str, default='cic-ussd', help='queue name for worker tasks')
-arg_parser.add_argument('-v', action='store_true', help='be verbose')
-arg_parser.add_argument('-vv', action='store_true', help='be more verbose')
-arg_parser.add_argument('--env-prefix',
-                        default=os.environ.get('CONFINI_ENV_PREFIX'),
-                        dest='env_prefix',
-                        type=str,
-                        help='environment prefix for variables to overwrite configuration')
-args = arg_parser.parse_args()
+args = exportable_parser.parse_args()
 
 # define log levels
 if args.vv:
@@ -69,7 +50,14 @@ config.process()
 config.censor('PASSWORD', 'DATABASE')
 logg.debug('config loaded from {}:\n{}'.format(args.c, config))
 
-# initialize elements
+# set up db
+data_source_name = dsn_from_config(config)
+SessionBase.connect(data_source_name,
+                    pool_size=int(config.get('DATABASE_POOL_SIZE')),
+                    debug=config.true('DATABASE_DEBUG'))
+# create session for the life time of http request
+SessionBase.session = SessionBase.create_session()
+
 # set up translations
 i18n.load_path.append(config.get('APP_LOCALE_PATH'))
 i18n.set('fallback', config.get('APP_LOCALE_FALLBACK'))
@@ -82,12 +70,6 @@ ussd_menu_db = create_local_file_data_stores(file_location=config.get('USSD_MENU
                                             table_name='ussd_menu')
 UssdMenu.ussd_menu_db = ussd_menu_db
 
-# set up db
-data_source_name = dsn_from_config(config)
-SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
-# create session for the life time of http request
-SessionBase.session = SessionBase.create_session()
-
 # define universal redis cache access
 InMemoryStore.cache = redis.StrictRedis(host=config.get('REDIS_HOSTNAME'),
                                         port=config.get('REDIS_PORT'),
@@ -127,6 +109,20 @@ Chain.spec = chain_spec
 UssdStateMachine.states = states
 UssdStateMachine.transitions = transitions
 
+# retrieve default token data
+default_token_data = get_default_token_data()
+chain_str = Chain.spec.__str__()
+
+# cache default token for re-usability
+if default_token_data:
+    cache_key = create_cached_data_key(
+        identifier=chain_str.encode('utf-8'),
+        salt=':cic.default_token_data'
+    )
+    cache_data(key=cache_key, data=json.dumps(default_token_data))
+else:
+    raise InitializationError(f'Default token data for: {chain_str} not found.')
+
 
 def application(env, start_response):
    """Loads python code for application to be accessible over web server
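With the default token cached at startup, later handlers can resolve the symbol from the cache instead of hardcoding 'SRF'. A sketch of a consumer, assuming get_cached_data is exposed by cic_ussd.redis alongside the helpers imported above and that the payload carries a 'symbol' field:

# sketch only; 'symbol' as the payload key is an assumption
import json

from cic_ussd.chain import Chain
from cic_ussd.redis import create_cached_data_key, get_cached_data

def default_token_symbol() -> str:
    chain_str = Chain.spec.__str__()
    cache_key = create_cached_data_key(
        identifier=chain_str.encode('utf-8'),
        salt=':cic.default_token_data'
    )
    default_token_data = json.loads(get_cached_data(key=cache_key))
    return default_token_data.get('symbol')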
@@ -134,6 +130,8 @@ def application(env, start_response):
    :type env: dict
    :param start_response: Callable to define responses.
    :type start_response: any
+    :return: a list containing a bytes representation of the response object
+    :rtype: list
    """
    # define headers
    errors_headers = [('Content-Type', 'text/plain'), ('Content-Length', '0')]
@@ -194,20 +192,3 @@ def application(env, start_response):
    start_response('200 OK,', headers)
    SessionBase.session.close()
    return [response_bytes]
-
-    # handle pin requests
-    if get_request_endpoint(env) == '/pin':
-        phone_number = get_query_parameters(env=env, query_name='phoneNumber')
-        phone_number = quote_plus(phone_number)
-        response, message = process_pin_reset_requests(env=env, phone_number=phone_number)
-        response_bytes, headers = define_response_with_content(headers=errors_headers, response=response)
-        SessionBase.session.close()
-        start_response(message, headers)
-        return [response_bytes]
-
-    # handle requests for locked accounts
-    response, message = process_locked_accounts_requests(env=env)
-    response_bytes, headers = define_response_with_content(headers=headers, response=response)
-    start_response(message, headers)
-    SessionBase.session.close()
-    return [response_bytes]
apps/cic-ussd/cic_ussd/runnable/server_base.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+"""This module handles generic wsgi server configurations that can then be subsumed by different server flavors for the
+cic-ussd component.
+"""
+
+# standard imports
+import logging
+import os
+from argparse import ArgumentParser
+
+# third-party imports
+
+# local imports
+
+# define a logging system
+logging.basicConfig(level=logging.WARNING)
+logg = logging.getLogger()
+
+# define default config directory as would be defined in docker
+default_config_dir = '/usr/local/etc/cic-ussd/'
+
+# define args parser
+arg_parser = ArgumentParser(description='CLI for handling cic-ussd server applications.')
+arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
+arg_parser.add_argument('-v', help='be verbose', action='store_true')
+arg_parser.add_argument('-vv', help='be more verbose', action='store_true')
+arg_parser.add_argument('-q', type=str, default='cic-ussd', help='queue name for worker tasks')
+arg_parser.add_argument('--env-prefix',
+                        default=os.environ.get('CONFINI_ENV_PREFIX'),
+                        dest='env_prefix',
+                        type=str,
+                        help='environment prefix for variables to overwrite configuration')
+exportable_parser = arg_parser
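Each server flavor then imports the shared parser and applies its own log thresholds; a sketch mirroring the pattern cic_user_server.py uses above (flavor-specific flags could be added with add_argument before parsing):

# sketch only
import logging

from cic_ussd.runnable.server_base import exportable_parser, logg

args = exportable_parser.parse_args()
if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)
logg.debug('parsed server args: {}'.format(args))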
@@ -64,7 +64,7 @@ def has_sufficient_balance(state_machine_data: Tuple[str, dict, Account]) -> boo
    # get cached balance
    key = create_cached_data_key(
        identifier=bytes.fromhex(user.blockchain_address[2:]),
-        salt='cic.balances_data'
+        salt=':cic.balances_data'
    )
    cached_balance = get_cached_data(key=key)
    operational_balance = compute_operational_balance(balances=json.loads(cached_balance))
@@ -176,7 +176,7 @@ def edit_user_metadata_attribute(state_machine_data: Tuple[str, dict, Account]):
    blockchain_address = user.blockchain_address
    key = generate_metadata_pointer(
        identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
    )
    user_metadata = get_cached_data(key=key)
 
@@ -23,7 +23,7 @@ def has_cached_user_metadata(state_machine_data: Tuple[str, dict, Account]):
    # check for user metadata in cache
    key = generate_metadata_pointer(
        identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
    )
    user_metadata = get_cached_data(key=key)
    return user_metadata is not None
@@ -136,7 +136,7 @@ def process_balances_callback(result: list, param: str, status_code: int):
        blockchain_address = balances_data.get('address')
        key = create_cached_data_key(
            identifier=bytes.fromhex(blockchain_address[2:]),
-            salt='cic.balances_data'
+            salt=':cic.balances_data'
        )
        cache_data(key=key, data=json.dumps(balances_data))
    else:
|
|||||||
|
|
||||||
# cache account statement
|
# cache account statement
|
||||||
identifier = bytes.fromhex(param[2:])
|
identifier = bytes.fromhex(param[2:])
|
||||||
key = create_cached_data_key(identifier=identifier, salt='cic.statement')
|
key = create_cached_data_key(identifier=identifier, salt=':cic.statement')
|
||||||
data = json.dumps(processed_transactions)
|
data = json.dumps(processed_transactions)
|
||||||
|
|
||||||
# cache statement data
|
# cache statement data
|
||||||
|
@@ -1,7 +1,7 @@
 # standard imports
 import semver
 
-version = (0, 3, 0, 'alpha.9')
+version = (0, 3, 0, 'alpha.10')
 
 version_object = semver.VersionInfo(
    major=version[0],
@@ -38,8 +38,9 @@ COPY cic-ussd/transitions/ cic-ussd/transitions/
 COPY cic-ussd/var/ cic-ussd/var/
 
 COPY cic-ussd/docker/db.sh \
-     cic-ussd/docker/start_tasker.sh \
-     cic-ussd/docker/start_uwsgi.sh \
+     cic-ussd/docker/start_cic_user_tasker.sh \
+     cic-ussd/docker/start_cic_user_ussd_server.sh\
+     cic-ussd/docker/start_cic_user_server.sh\
     /root/
 
 RUN chmod +x /root/*.sh
apps/cic-ussd/docker/start_cic_user_server.sh (new file, 7 lines)
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+. /root/db.sh
+
+user_server_port=${SERVER_PORT:-9500}
+
+/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/daemons/cic_user_server.py --http :"$user_server_port" --pyargv "$@"
apps/cic-ussd/docker/start_cic_user_tasker.sh (new file, 5 lines)
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+. /root/db.sh
+
+/usr/local/bin/cic-user-tasker "$@"
apps/cic-ussd/docker/start_cic_user_ussd_server.sh (new file, 7 lines)
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+. /root/db.sh
+
+user_ussd_server_port=${SERVER_PORT:-9000}
+
+/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/daemons/cic_user_ussd_server.py --http :"$user_ussd_server_port" --pyargv "$@"
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-. /root/db.sh
-
-/usr/local/bin/cic-ussd-tasker $@
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-. /root/db.sh
-
-server_port=${SERVER_PORT:-9000}
-
-/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/server.py --http :$server_port --pyargv "$@"
@@ -35,6 +35,7 @@ packages =
    cic_ussd.menu
    cic_ussd.metadata
    cic_ussd.runnable
+    cic_ussd.runnable.daemons
    cic_ussd.session
    cic_ussd.state_machine
    cic_ussd.state_machine.logic
@@ -44,5 +45,5 @@ scripts =
 
 [options.entry_points]
 console_scripts =
-    cic-ussd-tasker = cic_ussd.runnable.tasker:main
+    cic-user-tasker = cic_ussd.runnable.daemons.cic_user_tasker:main
    cic-ussd-client = cic_ussd.runnable.client:main
@@ -105,7 +105,7 @@ def test_get_user_metadata(caplog,
    assert 'Get latest data status: 200' in caplog.text
    key = generate_metadata_pointer(
        identifier=identifier,
-        cic_type='cic.person'
+        cic_type=':cic.person'
    )
    cached_user_metadata = get_cached_data(key=key)
    assert cached_user_metadata
@@ -36,7 +36,7 @@ def test_has_cached_user_metadata(create_in_db_ussd_session,
    user = create_activated_user
    key = generate_metadata_pointer(
        identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
    )
    cache_data(key=key, data=json.dumps(person_metadata))
    result = has_cached_user_metadata(state_machine_data=state_machine_data)
apps/cic-ussd/tests/fixtures/user.py (vendored, 2 lines changed)
@@ -115,6 +115,6 @@ def cached_user_metadata(create_activated_user, init_redis_cache, person_metadat
    user_metadata = json.dumps(person_metadata)
    key = generate_metadata_pointer(
        identifier=blockchain_address_to_metadata_pointer(blockchain_address=create_activated_user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
    )
    cache_data(key=key, data=user_metadata)
|
@ -4,9 +4,13 @@ FROM python:3.8.6-slim-buster as compile-image
|
|||||||
RUN apt-get update
|
RUN apt-get update
|
||||||
RUN apt-get install -y --no-install-recommends git gcc g++ libpq-dev gawk jq telnet wget openssl iputils-ping gnupg socat bash procps make python2 cargo
|
RUN apt-get install -y --no-install-recommends git gcc g++ libpq-dev gawk jq telnet wget openssl iputils-ping gnupg socat bash procps make python2 cargo
|
||||||
|
|
||||||
RUN apt-get install -y software-properties-common
|
RUN touch /etc/apt/sources.list.d/ethereum.list
|
||||||
RUN add-apt-repository ppa:ethereum/ethereum
|
RUN echo 'deb http://ppa.launchpad.net/ethereum/ethereum/ubuntu bionic main' > /etc/apt/sources.list.d/ethereum.list
|
||||||
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 1C52189C923F6CA9
|
RUN echo 'deb-src http://ppa.launchpad.net/ethereum/ethereum/ubuntu bionic main' >> /etc/apt/sources.list.d/ethereum.list
|
||||||
|
|
||||||
|
RUN cat etc/apt/sources.list.d/ethereum.list
|
||||||
|
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 2A518C819BE37D2C2031944D1C52189C923F6CA9
|
||||||
|
|
||||||
RUN apt-get update
|
RUN apt-get update
|
||||||
RUN apt-get install solc
|
RUN apt-get install solc
|
||||||
RUN pip install --upgrade pip
|
RUN pip install --upgrade pip
|
||||||
@ -27,15 +31,6 @@ RUN echo Install confini schema files && \
|
|||||||
git checkout $cic_config_commit && \
|
git checkout $cic_config_commit && \
|
||||||
cp -v *.ini $CONFINI_DIR
|
cp -v *.ini $CONFINI_DIR
|
||||||
|
|
||||||
ARG cic_contracts_commit=698ef3a30fde8d7f2c498f1208fb0ff45d665501
|
|
||||||
ARG cic_contracts_url=https://gitlab.com/grassrootseconomics/cic-contracts.git/
|
|
||||||
RUN echo Install ABI collection for solidity interfaces used across all components && \
|
|
||||||
git clone --depth 1 $cic_contracts_url cic-contracts && \
|
|
||||||
cd cic-contracts && \
|
|
||||||
git fetch --depth 1 origin $cic_contracts_commit && \
|
|
||||||
git checkout $cic_contracts_commit && \
|
|
||||||
make install
|
|
||||||
|
|
||||||
# Install nvm with node and npm
|
# Install nvm with node and npm
|
||||||
# https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
|
# https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
|
||||||
ENV NVM_DIR /root/.nvm
|
ENV NVM_DIR /root/.nvm
|
||||||
@@ -52,54 +47,61 @@ RUN wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh |
 ENV NODE_PATH $NVM_DIR/versions/node//v$NODE_VERSION/lib/node_modules
 ENV PATH $NVM_DIR/versions/node//v$NODE_VERSION/bin:$PATH
 
-RUN useradd --create-home grassroots
-WORKDIR /home/grassroots
-USER grassroots
+#RUN useradd --create-home grassroots
+# WORKDIR /home/grassroots
+# USER grassroots
 
+ARG pip_extra_args=""
+ARG pip_index_url=https://pypi.org/simple
 ARG pip_extra_index_url=https://pip.grassrootseconomics.net:8433
-ARG cic_base_version=0.1.2a79
-ARG cic_eth_version=0.11.0b8+build.c2286e5c
-ARG sarafu_faucet_version=0.0.2a28
-ARG sarafu_token_version==0.0.1a6
-ARG cic_contracts_version=0.0.2a2
-RUN pip install --user --extra-index-url $pip_extra_index_url cic-base[full_graph]==$cic_base_version \
+ARG cic_base_version=0.1.2b8
+ARG cic_eth_version=0.11.0b12
+ARG sarafu_token_version=0.0.1a8
+ARG sarafu_faucet_version=0.0.3a2
+RUN pip install --user --index-url https://pypi.org/simple --extra-index-url $pip_extra_index_url \
+    cic-base[full_graph]==$cic_base_version \
    cic-eth==$cic_eth_version \
-    cic-contracts==$cic_contracts_version \
    sarafu-faucet==$sarafu_faucet_version \
-    sarafu-token==$sarafu_token_version
+    sarafu-token==$sarafu_token_version \
+    cic-eth==$cic_eth_version
 
+# -------------- begin runtime container ----------------
 FROM python:3.8.6-slim-buster as runtime-image
 
 RUN apt-get update
 RUN apt-get install -y --no-install-recommends gnupg libpq-dev
-RUN apt-get install -y --no-install-recommends jq
+RUN apt-get install -y jq bash iputils-ping socat
 
 COPY --from=compile-image /usr/local/bin/ /usr/local/bin/
 COPY --from=compile-image /usr/local/etc/cic/ /usr/local/etc/cic/
+COPY --from=compile-image /usr/local/lib/python3.8/site-packages/ \
+                          /usr/local/lib/python3.8/site-packages/
 
-RUN useradd --create-home grassroots
-WORKDIR /home/grassroots
-# COPY python dependencies to user dir
-COPY --from=compile-image /home/grassroots/.local .local
-ENV PATH=/home/grassroots/.local/bin:$PATH
+ENV EXTRA_INDEX_URL https://pip.grassrootseconomics.net:8433
+# RUN useradd -u 1001 --create-home grassroots
+# RUN adduser grassroots sudo && \
+#     echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
+# WORKDIR /home/grassroots
 
 COPY contract-migration/testdata/pgp testdata/pgp
 COPY contract-migration/sarafu_declaration.json sarafu_declaration.json
 COPY contract-migration/keystore keystore
 COPY contract-migration/envlist .
+COPY contract-migration/scripts scripts/
 
-# RUN chown grassroots:grassroots .local/
-
-RUN mkdir -p /tmp/cic/config
-RUN chown grassroots:grassroots /tmp/cic/config
 # A shared output dir for environment configs
+RUN mkdir -p /tmp/cic/config
+# RUN chown grassroots:grassroots /tmp/cic/config
 RUN chmod a+rwx /tmp/cic/config
 
 COPY contract-migration/*.sh ./
-RUN chown grassroots:grassroots -R .
+# RUN chown grassroots:grassroots -R .
 RUN chmod gu+x *.sh
 
-USER grassroots
+# we copied these from the root build container.
+# this is dumb though...I guess the compile image should have the same user
+# RUN chown grassroots:grassroots -R /usr/local/lib/python3.8/site-packages/
+
+# USER grassroots
 
 ENTRYPOINT [ ]
@@ -3,10 +3,11 @@ const path = require('path');
 const http = require('http');
 
 const cic = require('cic-client-meta');
+const crdt = require('crdt-meta');
 
 //const conf = JSON.parse(fs.readFileSync('./cic.conf'));
 
-const config = new cic.Config('./config');
+const config = new crdt.Config('./config');
 config.process();
 console.log(config);
 
@@ -41,7 +42,7 @@ function sendit(uid, envelope) {
 }
 
 function doOne(keystore, filePath) {
-	const signer = new cic.PGPSigner(keystore);
+	const signer = new crdt.PGPSigner(keystore);
	const parts = path.basename(filePath).split('.');
	const ethereum_address = path.basename(parts[0]);
 
@@ -51,7 +52,7 @@ function doOne(keystore, filePath) {
	//console.log(o);
	fs.unlinkSync(filePath);
 
-	const s = new cic.Syncable(uid, o);
+	const s = new crdt.Syncable(uid, o);
	s.setSigner(signer);
	s.onwrap = (env) => {
		sendit(uid, env);
@@ -65,7 +66,7 @@ const publicKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_P
 pk = fs.readFileSync(privateKeyPath);
 pubk = fs.readFileSync(publicKeyPath);
 
-new cic.PGPKeyStore(
+new crdt.PGPKeyStore(
	config.get('PGP_PASSPHRASE'),
	pk,
	pubk,
@@ -4,10 +4,11 @@ const http = require('http');
 
 const cic = require('cic-client-meta');
 const vcfp = require('vcard-parser');
+const crdt = require('crdt-meta');
 
 //const conf = JSON.parse(fs.readFileSync('./cic.conf'));
 
-const config = new cic.Config('./config');
+const config = new crdt.Config('./config');
 config.process();
 console.log(config);
 
@@ -42,7 +43,7 @@ function sendit(uid, envelope) {
 }
 
 function doOne(keystore, filePath, address) {
-	const signer = new cic.PGPSigner(keystore);
+	const signer = new crdt.PGPSigner(keystore);
 
	const j = JSON.parse(fs.readFileSync(filePath).toString());
	const b = Buffer.from(j['vcard'], 'base64');
@@ -51,9 +52,8 @@ function doOne(keystore, filePath, address) {
	const phone = o.tel[0].value;
 
	cic.Phone.toKey(phone).then((uid) => {
-		const o = fs.readFileSync(filePath, 'utf-8');
 
-		const s = new cic.Syncable(uid, o);
+		const s = new crdt.Syncable(uid, address);
		s.setSigner(signer);
		s.onwrap = (env) => {
			sendit(uid, env);
@@ -67,7 +67,7 @@ const publicKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_P
 pk = fs.readFileSync(privateKeyPath);
 pubk = fs.readFileSync(publicKeyPath);
 
-new cic.PGPKeyStore(
+new crdt.PGPKeyStore(
	config.get('PGP_PASSPHRASE'),
	pk,
	pubk,
@@ -123,7 +123,7 @@ function importMetaPhone(keystore) {
		if (batchCount == batchSize) {
			console.debug('reached batch size, breathing');
			batchCount=0;
-			setTimeout(importMeta, batchDelay, keystore);
+			setTimeout(importMetaPhone, batchDelay, keystore);
			return;
		}
	}
|
27
apps/contract-migration/scripts/package-lock.json
generated
27
apps/contract-migration/scripts/package-lock.json
generated
@ -2112,9 +2112,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"@types/node": {
|
"@types/node": {
|
||||||
"version": "14.14.41",
|
"version": "14.14.39",
|
||||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.41.tgz",
|
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.39.tgz",
|
||||||
"integrity": "sha512-dueRKfaJL4RTtSa7bWeTK1M+VH+Gns73oCgzvYfHZywRCoPSd8EkXBL0mZ9unPTveBn+D9phZBaxuzpwjWkW0g=="
|
"integrity": "sha512-Qipn7rfTxGEDqZiezH+wxqWYR8vcXq5LRpZrETD19Gs4o8LbklbmqotSUsMU+s5G3PJwMRDfNEYoxrcBwIxOuw=="
|
||||||
},
|
},
|
||||||
"@types/pbkdf2": {
|
"@types/pbkdf2": {
|
||||||
"version": "3.1.0",
|
"version": "3.1.0",
|
||||||
@@ -2380,14 +2380,15 @@
      "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
    },
    "cic-client-meta": {
-      "version": "0.0.7-alpha.6",
-      "resolved": "https://registry.npmjs.org/cic-client-meta/-/cic-client-meta-0.0.7-alpha.6.tgz",
-      "integrity": "sha512-oIN1aHkPHfsxJKDV6k4f1kX2tcppw3Q+D1b4BoPh0hYjNKNb7gImBMWnGsy8uiD9W6SNYE4sIXyrtct8mvrhsw==",
+      "version": "0.0.7-alpha.8",
+      "resolved": "https://registry.npmjs.org/cic-client-meta/-/cic-client-meta-0.0.7-alpha.8.tgz",
+      "integrity": "sha512-NtU4b4dptG2gsKXIvAv1xCxxxhrr801tb8+Co1O+VLx+wvxFyPRxqa2f2eN5nrSnFnljNsWWpE6K5bJZb1+Rqw==",
      "requires": {
        "@ethereumjs/tx": "^3.0.0-beta.1",
        "automerge": "^0.14.1",
+        "crdt-meta": "0.0.8",
        "ethereumjs-wallet": "^1.0.1",
-        "ini": "^1.3.5",
+        "ini": "^1.3.8",
        "openpgp": "^4.10.8",
        "pg": "^8.4.2",
        "sqlite3": "^5.0.0",
@@ -2494,6 +2495,18 @@
        "printj": "~1.1.0"
      }
    },
+    "crdt-meta": {
+      "version": "0.0.8",
+      "resolved": "https://registry.npmjs.org/crdt-meta/-/crdt-meta-0.0.8.tgz",
+      "integrity": "sha512-CS0sS0L2QWthz7vmu6vzl3p4kcpJ+IKILBJ4tbgN4A3iNG8wnBeuDIv/z3KFFQjcfuP4QAh6E9LywKUTxtDc3g==",
+      "requires": {
+        "automerge": "^0.14.2",
+        "ini": "^1.3.8",
+        "openpgp": "^4.10.8",
+        "pg": "^8.5.1",
+        "sqlite3": "^5.0.2"
+      }
+    },
    "create-hash": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz",
@@ -1,6 +1,7 @@
 {
   "dependencies": {
-    "cic-client-meta": "0.0.7-alpha.6",
+    "cic-client-meta": "^0.0.7-alpha.8",
+    "crdt-meta": "0.0.8",
    "vcard-parser": "^1.0.0"
  }
 }
@@ -1,5 +1,5 @@
-cic-base[full_graph]==0.1.2b2
-sarafu-faucet==0.0.2a28
-cic-eth==0.11.0b10
+cic-base[full_graph]==0.1.2b8
+sarafu-faucet==0.0.3a2
+cic-eth==0.11.0b12
 cic-types==0.1.0a10
 crypto-dev-signer==0.4.14b3
@@ -345,12 +345,11 @@ class Verifier:
        address_recovered = address_recovered.replace('"', '')
 
        try:
-            address = strip_0x(address)
-            address_recovered = strip_0x(address_recovered)
+            upper_address_recovered = strip_0x(address_recovered).upper()
        except ValueError:
            raise VerifierError(address_recovered, 'metadata (phone) address {} address recovered {}'.format(address, address_recovered))
 
-        if address != address_recovered:
+        if upper_address != upper_address_recovered:
            raise VerifierError(address_recovered, 'metadata (phone)')
 
 
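The fix makes the address check case-insensitive, so checksummed and lowercased hex spellings of the same address no longer fail verification; upper_address is presumably computed from address earlier in the method, outside this hunk. A self-contained sketch of the comparison:

# sketch only; strip_0x in the real code comes from the script's hex helpers
def _normalize(address: str) -> str:
    # drop a leading '0x' if present, then uppercase for case-insensitive comparison
    bare = address[2:] if address.lower().startswith('0x') else address
    return bare.upper()

def addresses_match(address: str, address_recovered: str) -> bool:
    return _normalize(address) == _normalize(address_recovered)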
@@ -13,11 +13,10 @@ DEV_PIP_EXTRA_INDEX_URL=${DEV_PIP_EXTRA_INDEX_URL:-https://pip.grassrootseconomi
 DEV_DATABASE_NAME_CIC_ETH=${DEV_DATABASE_NAME_CIC_ETH:-"cic-eth"}
 CIC_DATA_DIR=${CIC_DATA_DIR:-/tmp/cic}
 ETH_PASSPHRASE=''
-DEV_TOKEN_TYPE=${DEV_TOKEN_TYPE:-giftable}
-if [ $DEV_TOKEN_TYPE = 'giftable' ]; then
-	token_symbol='GFT'
-else
-	token_symbol='SRF'
+CIC_DEFAULT_TOKEN_SYMBOL=${CIC_DEFAULT_TOKEN_SYMBOL:-GFT}
+if [[ $CIC_DEFAULT_TOKEN_SYMBOL != 'GFT' && $CIC_DEFAULT_TOKEN_SYMBOL != 'SRF' ]]; then
+	>&2 echo CIC_DEFAULT_TOKEN_SYMBOL must be one of [GFT,SRF], but was $CIC_DEFAULT_TOKEN_SYMBOL
+	exit 1
 fi
 
 # Debug flag
@@ -100,7 +99,7 @@ export DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS
 
 # Transfer tokens to gifter address
 >&2 echo "transfer sarafu tokens to token gifter address"
->&2 eth-transfer -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER --token-address $DEV_RESERVE_ADDRESS -w $debug $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${token_amount:0:-1}
+>&2 erc20-transfer -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER --token-address $DEV_RESERVE_ADDRESS -w $debug $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${token_amount:0:-1}
 
 #echo -n 0 > $init_level_file
 
@@ -53,8 +53,6 @@ services:
    command: [ "-c", "max_connections=200" ]
    volumes:
      - ./scripts/initdb/create_db.sql:/docker-entrypoint-initdb.d/1-create_all_db.sql
-      - ./apps/cic-meta/scripts/initdb/postgresql.sh:/docker-entrypoint-initdb.d/2-init-cic-meta.sh
-      - ./apps/cic-cache/db/psycopg2/db.sql:/docker-entrypoint-initdb.d/3-init-cic-meta.sql
      - postgres-db:/var/lib/postgresql/data
 
  redis:
@@ -78,6 +76,9 @@ services:
 
  contract-migration:
    build:
+      args:
+        pip_index_url: ${PIP_DEFAULT_INDEX_URL:-https://pypi.org/simple}
+        pip_extra_args: $PIP_EXTRA_ARGS
      context: apps/
      dockerfile: contract-migration/docker/Dockerfile
    # image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/contract-migration:latest
@@ -477,6 +478,7 @@ services:
      PGP_PUBLICKEY_TRUSTED_FILE: publickeys.asc
      PGP_PUBLICKEY_ACTIVE_FILE: publickeys.asc
      PGP_PUBLICKEY_ENCRYPT_FILE: publickeys.asc
+      SCHEMA_SQL_PATH: scripts/initdb/server.postgres.sql
    ports:
      - ${HTTP_PORT_CIC_META:-63380}:8000
    depends_on:
@@ -488,8 +490,7 @@ services:
      - ${LOCAL_VOLUME_DIR:-/tmp/cic}/pgp:/tmp/cic/pgp
    # command: "/root/start_server.sh -vv"
 
-  cic-ussd-server:
-    # image: grassrootseconomics:cic-ussd
+  cic-user-ussd-server:
    build:
      context: apps/
      dockerfile: cic-ussd/docker/Dockerfile
@@ -507,7 +508,7 @@ services:
      SERVER_PORT: 9000
      CIC_META_URL: ${CIC_META_URL:-http://meta:8000}
    ports:
-      - ${HTTP_PORT_CIC_USSD:-63315}:9000
+      - ${HTTP_PORT_CIC_USER_USSD_SERVER:-63315}:9000
    depends_on:
      - postgres
      - redis
@@ -516,10 +517,31 @@ services:
    deploy:
      restart_policy:
        condition: on-failure
-    command: "/root/start_uwsgi.sh -vv"
+    command: "/root/start_cic_user_ussd_server.sh -vv"
 
-  cic-ussd-tasker:
-    # image: grassrootseconomics:cic-ussd
+  cic-user-server:
+    build:
+      context: apps
+      dockerfile: cic-ussd/docker/Dockerfile
+    environment:
+      DATABASE_USER: grassroots
+      DATABASE_HOST: postgres
+      DATABASE_PORT: 5432
+      DATABASE_PASSWORD: tralala
+      DATABASE_NAME: cic_ussd
+      DATABASE_ENGINE: postgresql
+      DATABASE_DRIVER: psycopg2
+      DATABASE_POOL_SIZE: 0
+    ports:
+      - ${HTTP_PORT_CIC_USER_SERVER:-63415}:9500
+    depends_on:
+      - postgres
+    deploy:
+      restart_policy:
+        condition: on-failure
+    command: "/root/start_cic_user_server.sh -vv"
+
+  cic-user-tasker:
    build:
      context: apps
      dockerfile: cic-ussd/docker/Dockerfile
@@ -544,4 +566,4 @@ services:
    deploy:
      restart_policy:
        condition: on-failure
-    command: "/root/start_tasker.sh -q cic-ussd -vv"
+    command: "/root/start_cic_user_tasker.sh -q cic-ussd -vv"