William Luke 2021-11-01 10:49:07 +03:00
commit 7c73c8b30f
57 changed files with 6243 additions and 276 deletions

View File

@ -10,6 +10,7 @@ include:
#- local: 'apps/data-seeding/.gitlab-ci.yml'
stages:
- version
- build
- test
- deploy
@ -20,9 +21,39 @@ variables:
DOCKER_BUILDKIT: "1"
COMPOSE_DOCKER_CLI_BUILD: "1"
CI_DEBUG_TRACE: "true"
SEMVERBOT_VERSION: "0.2.0"
before_script:
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
#before_script:
# - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
version:
#image: python:3.7-stretch
image: registry.gitlab.com/grassrootseconomics/cic-base-images/ci-version:b01318ae
stage: version
script:
- mkdir -p ~/.ssh && chmod 700 ~/.ssh
- ssh-keyscan gitlab.com >> ~/.ssh/known_hosts && chmod 644 ~/.ssh/known_hosts
- eval $(ssh-agent -s)
- ssh-add <(echo "$SSH_PRIVATE_KEY")
- git remote set-url origin git@gitlab.com:grassrootseconomics/cic-internal-integration.git
- export TAG=$(sbot predict version -m auto)
- |
if [[ -z $TAG ]]
then
echo "tag could not be set $@"
exit 1
fi
- echo $TAG > version
- git tag -a v$TAG -m "ci tagged"
- git push origin v$TAG
artifacts:
paths:
- version
rules:
- if: $CI_COMMIT_REF_PROTECTED == "true"
when: always
- if: $CI_COMMIT_REF_NAME == "master"
when: always
# runs on protected branches and pushes to repo
build-push:
@ -30,12 +61,17 @@ build-push:
tags:
- integration
#script:
# - TAG=$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA sh ./scripts/build-push.sh
before_script:
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
script:
- TAG=latest sh ./scripts/build-push.sh
- TAG=latest ./scripts/build-push.sh
- TAG=$(cat ./version) ./scripts/build-push.sh
rules:
- if: $CI_COMMIT_REF_PROTECTED == "true"
when: always
- if: $CI_COMMIT_REF_NAME == "master"
when: always
deploy-dev:
stage: deploy

.semverbot.toml Normal file
View File

@ -0,0 +1,16 @@
[git]
[git.config]
email = "semverbot@grassroots.org"
name = "semvervot"
[git.tags]
prefix = "v"
[semver]
mode = "git-commit"
[semver.detection]
patch = ["fix", "[fix]", "patch", "[patch]"]
minor = ["minor", "[minor]", "feat", "[feat]", "release", "[release]", "bump", "[bump]"]
major = ["BREAKING CHANGE"]

View File

@ -4,6 +4,7 @@
This repo uses docker-compose and Docker BuildKit. Set the following environment variables to get started:
```
export COMPOSE_DOCKER_CLI_BUILD=1
export DOCKER_BUILDKIT=1
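# With both variables exported, docker-compose builds go through BuildKit, which the
# RUN --mount=type=cache lines added in this commit rely on. Minimal sketch (compose file
# path as used by the build wrapper at the end of this diff):
docker-compose -f docker-compose.yml build   # pip/npm cache mounts are reused across rebuilds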

View File

@ -1,19 +1,16 @@
ARG DOCKER_REGISTRY=registry.gitlab.com/grassrootseconomics
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
COPY requirements.txt .
RUN apt-get install libffi-dev -y
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL="https://pypi.org/simple"
RUN pip install --index-url $PIP_INDEX_URL \
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--force-reinstall \
--no-cache \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt

View File

@ -17,11 +17,12 @@ logg = logging.getLogger()
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')
configdir = os.path.join(rootdir, 'cic_cache', 'data', 'config')
config_dir = os.path.join('/usr/local/etc/cic-cache')
#config_dir = os.path.join('/usr/local/etc/cic-cache')
argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('-c', type=str, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
@ -35,7 +36,7 @@ if args.vv:
elif args.v:
logging.getLogger().setLevel(logging.INFO)
config = confini.Config(args.c, args.env_prefix)
config = confini.Config(configdir, args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')

View File

@ -683,3 +683,4 @@ class Api(ApiBase):
t = self.callback_success.apply_async([r])
return t

View File

@ -10,7 +10,7 @@ version = (
0,
12,
4,
'alpha.13',
'alpha.14',
)
version_object = semver.VersionInfo(

View File

@ -1,4 +1,3 @@
@node cic-eth-accounts
@section Accounts
Accounts are private keys in the signer component keyed by "addresses," a one-way transformation of a public key. Data can be signed by using the account as identifier for corresponding RPC requests.

View File

@ -1,4 +1,4 @@
@node cic-eth system maintenance
@anchor{cic-eth-appendix-system-maintenance}
@appendix Admin API
The admin API is still in an early stage of refinement. User friendliness can be considerably improved.
@ -33,7 +33,7 @@ Get the current state of a lock
@appendixsection tag_account
Associate an identifier with an account address (@xref{cic-eth system accounts})
Associate an identifier with an account address (@xref{cic-eth-system-accounts})
@appendixsection have_account

View File

@ -14,5 +14,6 @@ Released 2021 under GPL3
@c
@contents
@include index.texi
@include content.texi
@include appendix.texi

View File

@ -0,0 +1,3 @@
@include admin.texi
@include chains.texi
@include transfertypes.texi

View File

@ -1,4 +1,4 @@
@node cic-eth Appendix Task chains
@anchor{cic-eth-appendix-task-chains}
@appendix Task chains
TBC - explain here how to generate these chain diagrams

View File

@ -1,4 +1,3 @@
@node cic-eth configuration
@section Configuration
Configuration parameters are grouped by configuration filename.

View File

@ -1,6 +1,6 @@
@node cic-eth
@top cic-eth
@include intro.texi
@include dependencies.texi
@include configuration.texi
@include system.texi
@ -9,6 +9,3 @@
@include incoming.texi
@include services.texi
@include tools.texi
@include admin.texi
@include chains.texi
@include transfertypes.texi

View File

@ -1,4 +1,3 @@
@node cic-eth-dependencies
@section Dependencies
This application is written in Python 3.8. It is tightly coupled with @code{python-celery}, which provides the task worker ecosystem. It also uses @code{SQLAlchemy} which provides useful abstractions for persistent storage though SQL, and @code{alembic} for database schema migrations.

View File

@ -1,4 +1,4 @@
@node cic-eth-incoming
@anchor{cic-eth-incoming}
@section Incoming transactions
All transactions in mined blocks will be passed to a selection of plugin filters to the @code{chainsyncer} component. Each of these filters are individual python module files in @code{cic_eth.runnable.daemons.filters}. This section describes their function.

View File

@ -1,9 +1,8 @@
@node cic-eth-interacting
@section Interacting with the system
The API to the @var{cic-eth} component is a proxy for executing @emph{chains of Celery tasks}. The tasks that compose individual chains are documented in @ref{cic-eth Appendix Task chains,the Task Chain appendix}, which also describes a CLI tool that can generate graph representationso of them.
The API to the @var{cic-eth} component is a proxy for executing @emph{chains of Celery tasks}. The tasks that compose individual chains are documented in @ref{cic-eth-appendix-task-chains,the Task Chain appendix}, which also describes a CLI tool that can generate graph representations of them.
There are two API classes, @var{Api} and @var{AdminApi}. The former is described later in this section, the latter described in @ref{cic-eth system maintenance,the Admin API appendix}.
There are two API classes, @var{Api} and @var{AdminApi}. The former is described later in this section, the latter described in @ref{cic-eth-appendix-system-maintenance,the Admin API appendix}.
@subsection Interface

View File

@ -1,4 +1,3 @@
@node cic-eth-outgoing
@section Outgoing transactions
@strong{Important! A pre-requisite for proper functioning of the component is that no other agent is sending transactions to the network for any of the keys in the keystore.}

View File

@ -1,4 +1,3 @@
@node cic-eth-services
@section Services
There are four daemons that together orchestrate all of the aforementioned recipes. This section will provide a high level description of them.

View File

@ -1,10 +1,10 @@
@node cic-eth system accounts
@section System initialization
When the system starts for the first time, it is locked for any state change request other than account creation@footnote{Specifically, the @code{INIT}, @code{SEND} and @code{QUEUE} lock bits are set.}. These locks should be @emph{reset} once system initialization has been completed. Currently, system initialization only involves creating and tagging required system accounts, as specified below.
See @ref{cic-eth-locking,Locking} and @ref{cic-eth-tools-ctrl,ctrl in Tools} for details on locking.
@anchor{cic-eth-system-accounts}
@subsection System accounts
Certain accounts in the system have special roles. These are defined by @emph{tagging} certain account addresses with well-known identifiers.

View File

@ -1,4 +1,3 @@
@node cic-eth-tools
@section Tools
A collection of CLI tools have been provided to help with diagnostics and other administrative tasks. These use the same configuration infrastructure as the daemons.
@ -37,7 +36,7 @@ Execute a token transfer on behalf of a custodial account.
@subsection tag (cic-eth-tag)
Associate an account address with a string identifier. @xref{cic-eth system accounts}
Associate an account address with a string identifier. @xref{cic-eth-system-accounts}
@anchor{cic-eth-tools-ctrl}

View File

@ -1,4 +1,3 @@
@node cic-eth Appendix Transaction types
@appendix Transfer types
@table @var

View File

@ -1,6 +1,6 @@
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
# Copy just the requirements and install....this _might_ give docker a hint on caching but we
# do load these all into setup.py later
@ -11,16 +11,16 @@ ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple
RUN apt-get install libffi-dev
RUN pip install --index-url $PIP_INDEX_URL \
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a7
COPY *requirements.txt ./
RUN pip install --index-url $PIP_INDEX_URL \
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt \

View File

@ -1,5 +1,5 @@
celery==4.4.7
chainlib-eth>=0.0.10a4,<0.1.0
chainlib-eth>=0.0.10a16,<0.1.0
semver==2.13.0
crypto-dev-signer>=0.4.15rc2,<0.5.0
uwsgi==2.0.19.1

View File

@ -1,19 +1,23 @@
FROM node:15.3.0-alpine3.10
#FROM node:lts-alpine3.14
WORKDIR /root
RUN apk add --no-cache postgresql bash
ARG NPM_REPOSITORY=${NPM_REPOSITORY:-https://registry.npmjs.org}
RUN npm config set snyk=false
#RUN npm config set registry={NPM_REPOSITORY}
RUN npm config set registry=${NPM_REPOSITORY}
# copy the dependencies
COPY package.json package-lock.json ./
#RUN --mount=type=cache,mode=0755,target=/root/.npm \
RUN npm set cache /root/.npm && \
RUN --mount=type=cache,mode=0755,target=/root/.npm \
npm set cache /root/.npm && \
npm cache verify && \
npm ci --verbose
COPY webpack.config.js .
COPY tsconfig.json .
COPY webpack.config.js ./
COPY tsconfig.json ./
## required to build the cic-client-meta module
COPY . .
COPY tests/*.asc /root/pgp/

File diff suppressed because it is too large

View File

@ -1,8 +1,9 @@
create table if not exists store (
id serial primary key not null,
owner_fingerprint text not null,
owner_fingerprint text default null,
hash char(64) not null unique,
content text not null
content text not null,
mime_type text
);
create index if not exists idx_fp on store ((lower(owner_fingerprint)));

View File

@ -1,9 +1,10 @@
create table if not exists store (
/*id serial primary key not null,*/
id integer primary key autoincrement,
owner_fingerprint text not null,
owner_fingerprint text default null,
hash char(64) not null unique,
content text not null
content text not null,
mime_type text
);
create index if not exists idx_fp on store ((lower(owner_fingerprint)));

View File

@ -1,12 +1,13 @@
import * as Automerge from 'automerge';
import * as pgp from 'openpgp';
import * as crypto from 'crypto';
import { Envelope, Syncable } from '@cicnet/crdt-meta';
import { Envelope, Syncable, bytesToHex } from '@cicnet/crdt-meta';
function handleNoMergeGet(db, digest, keystore) {
const sql = "SELECT content FROM store WHERE hash = '" + digest + "'";
return new Promise<string|boolean>((whohoo, doh) => {
const sql = "SELECT owner_fingerprint, content, mime_type FROM store WHERE hash = '" + digest + "'";
return new Promise<any>((whohoo, doh) => {
db.query(sql, (e, rs) => {
if (e !== null && e !== undefined) {
doh(e);
@ -16,16 +17,36 @@ function handleNoMergeGet(db, digest, keystore) {
return;
}
const immutable = rs.rows[0]['owner_fingerprint'] == undefined;
let mimeType;
if (immutable) {
if (rs.rows[0]['mime_type'] === undefined) {
mimeType = 'application/octet-stream';
} else {
mimeType = rs.rows[0]['mime_type'];
}
} else {
mimeType = 'application/json';
}
const cipherText = rs.rows[0]['content'];
pgp.message.readArmored(cipherText).then((m) => {
const opts = {
message: m,
privateKeys: [keystore.getPrivateKey()],
format: 'binary',
};
pgp.decrypt(opts).then((plainText) => {
const o = Syncable.fromJSON(plainText.data);
const r = JSON.stringify(o.m['data']);
whohoo(r);
let r;
if (immutable) {
r = plainText.data;
} else {
mimeType = 'application/json';
const d = new TextDecoder().decode(plainText.data);
const o = Syncable.fromJSON(d);
r = JSON.stringify(o.m['data']);
}
whohoo([r, mimeType]);
}).catch((e) => {
console.error('decrypt', e);
doh(e);
@ -57,6 +78,7 @@ function handleServerMergePost(data, db, digest, keystore, signer) {
} else {
e = Envelope.fromJSON(v);
s = e.unwrap();
console.debug('s', s, o)
s.replace(o, 'server merge');
e.set(s);
s.onwrap = (e) => {
@ -139,7 +161,13 @@ function handleClientMergeGet(db, digest, keystore) {
privateKeys: [keystore.getPrivateKey()],
};
pgp.decrypt(opts).then((plainText) => {
const o = Syncable.fromJSON(plainText.data);
let d;
if (typeof(plainText.data) == 'string') {
d = plainText.data;
} else {
d = new TextDecoder().decode(plainText.data);
}
const o = Syncable.fromJSON(d);
const e = new Envelope(o);
whohoo(e.toJSON());
}).catch((e) => {
@ -201,10 +229,65 @@ function handleClientMergePut(data, db, digest, keystore, signer) {
});
}
function handleImmutablePost(data, db, digest, keystore, contentType) {
return new Promise<Array<string|boolean>>((whohoo, doh) => {
let data_binary = data;
const h = crypto.createHash('sha256');
h.update(data_binary);
const z = h.digest();
const r = bytesToHex(z);
if (digest) {
if (r != digest) {
doh('hash mismatch: ' + r + ' != ' + digest);
return;
}
} else {
digest = r;
console.debug('calculated digest ' + digest);
}
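// reuse the plain GET handler to see whether this digest is already stored; if so, resolve [false, digest] without inserting again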
handleNoMergeGet(db, digest, keystore).then((haveDigest) => {
if (haveDigest !== false) {
whohoo([false, digest]);
return;
}
let message;
if (typeof(data) == 'string') {
data_binary = new TextEncoder().encode(data);
message = pgp.message.fromText(data);
} else {
message = pgp.message.fromBinary(data);
}
const opts = {
message: message,
publicKeys: keystore.getEncryptKeys(),
};
pgp.encrypt(opts).then((cipherText) => {
const sql = "INSERT INTO store (hash, content, mime_type) VALUES ('" + digest + "', '" + cipherText.data + "', '" + contentType + "') ON CONFLICT (hash) DO UPDATE SET content = EXCLUDED.content;";
db.query(sql, (e, rs) => {
if (e !== null && e !== undefined) {
doh(e);
return;
}
whohoo([true, digest]);
});
}).catch((e) => {
doh(e);
});
}).catch((e) => {
doh(e);
});
});
}
export {
handleClientMergePut,
handleClientMergeGet,
handleServerMergePost,
handleServerMergePut,
handleNoMergeGet,
handleImmutablePost,
};

View File

@ -118,37 +118,71 @@ async function processRequest(req, res) {
return;
}
let mod = req.method.toLowerCase() + ":automerge:";
let modDetail = undefined;
let immutablePost = false;
try {
digest = parseDigest(req.url);
} catch(e) {
console.error('digest error: ' + e)
if (req.url == '/') {
immutablePost = true;
modDetail = 'immutable';
} else {
console.error('url (' + req.url + ') is neither empty nor a valid digest: ' + e)
res.writeHead(400, {"Content-Type": "text/plain"});
res.end();
return;
}
const mergeHeader = req.headers['x-cic-automerge'];
let mod = req.method.toLowerCase() + ":automerge:";
switch (mergeHeader) {
case "client":
mod += "client"; // client handles merges
break;
case "server":
mod += "server"; // server handles merges
break;
default:
mod += "none"; // merged object only (get only)
}
let data = '';
if (modDetail === undefined) {
const mergeHeader = req.headers['x-cic-automerge'];
switch (mergeHeader) {
case "client":
if (immutablePost) {
res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
res.end();
return;
}
modDetail = "client"; // client handles merges
break;
case "server":
if (immutablePost) {
res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
res.end();
return;
}
modDetail = "server"; // server handles merges
break;
case "immutable":
modDetail = "immutable"; // no merging, literal immutable content with content-addressing
break;
default:
modDetail = "none"; // merged object only (get only)
}
}
mod += modDetail;
// handle bigger chunks of data
let data;
req.on('data', (d) => {
if (data === undefined) {
data = d;
} else {
data += d;
}
});
req.on('end', async () => {
console.debug('mode', mod);
let content = '';
req.on('end', async (d) => {
let inputContentType = req.headers['content-type'];
let debugString = 'executing mode ' + mod ;
if (data !== undefined) {
debugString += ' for content type ' + inputContentType + ' length ' + data.length;
}
console.debug(debugString);
let content;
let contentType = 'application/json';
console.debug('handling data', data);
let statusCode = 200;
let r:any = undefined;
try {
switch (mod) {
@ -176,6 +210,7 @@ async function processRequest(req, res) {
res.end();
return;
}
content = '';
break;
//case 'get:automerge:server':
// content = await handlers.handleServerMergeGet(db, digest, keystore);
@ -183,12 +218,24 @@ async function processRequest(req, res) {
case 'get:automerge:none':
r = await handlers.handleNoMergeGet(db, digest, keystore);
if (r == false) {
if (r === false) {
res.writeHead(404, {"Content-Type": "text/plain"});
res.end();
return;
}
content = r;
content = r[0];
contentType = r[1];
break;
case 'post:automerge:immutable':
if (inputContentType === undefined) {
inputContentType = 'application/octet-stream';
}
r = await handlers.handleImmutablePost(data, db, digest, keystore, inputContentType);
if (r[0]) {
statusCode = 201;
}
content = r[1];
break;
default:
@ -210,8 +257,15 @@ async function processRequest(req, res) {
return;
}
const responseContentLength = (new TextEncoder().encode(content)).length;
res.writeHead(200, {
//let responseContentLength;
//if (typeof(content) == 'string') {
// (new TextEncoder().encode(content)).length;
//}
const responseContentLength = content.length;
//if (responseContentLength === undefined) {
// responseContentLength = 0;
//}
res.writeHead(statusCode, {
"Access-Control-Allow-Origin": "*",
"Content-Type": contentType,
"Content-Length": responseContentLength,

View File

@ -7,6 +7,8 @@ import * as handlers from '../scripts/server/handlers';
import { Envelope, Syncable, ArgPair, PGPKeyStore, PGPSigner, KeyStore, Signer } from '@cicnet/crdt-meta';
import { SqliteAdapter } from '../src/db';
const hashOfFoo = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';
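// sha256 hex digest of the ASCII string 'foo'; the immutable-content tests below expect handleImmutablePost to reproduce it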
function createKeystore() {
const pksa = fs.readFileSync(__dirname + '/privatekeys.asc', 'utf-8');
const pubksa = fs.readFileSync(__dirname + '/publickeys.asc', 'utf-8');
@ -44,11 +46,13 @@ function createDatabase(sqlite_file:string):Promise<any> {
// doh(e);
// return;
// }
// get this from real sql files sources
const sql = `CREATE TABLE store (
id integer primary key autoincrement,
owner_fingerprint text not null,
owner_fingerprint text default null,
hash char(64) not null unique,
content text not null
content text not null,
mime_type text default null
);
`
@ -111,15 +115,18 @@ describe('server', async () => {
let j = env.toJSON();
const content = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
assert(content); // true-ish
console.debug('content', content);
let v = await handlers.handleNoMergeGet(db, digest, keystore);
if (v === undefined) {
if (v === false) {
db.close();
assert.fail('');
}
db.close();
return;
v = await handlers.handleClientMergeGet(db, digest, keystore);
if (v === undefined) {
if (v === false) {
db.close();
assert.fail('');
}
@ -187,7 +194,7 @@ describe('server', async () => {
j = await handlers.handleNoMergeGet(db, digest, keystore);
assert(v); // true-ish
let o = JSON.parse(j);
let o = JSON.parse(j[0]);
o.bar = 'xyzzy';
j = JSON.stringify(o);
@ -212,82 +219,39 @@ describe('server', async () => {
j = await handlers.handleNoMergeGet(db, digest, keystore);
assert(j); // true-ish
o = JSON.parse(j);
o = JSON.parse(j[0]);
console.log(o);
db.close();
});
await it('server_merge', async () => {
const keystore = await createKeystore();
const signer = new PGPSigner(keystore);
const db = await createDatabase(__dirname + '/db.three.sqlite');
const digest = 'deadbeef';
let s = new Syncable(digest, {
bar: 'baz',
});
let env = await wrap(s, signer)
let j:any = env.toJSON();
let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
assert(v); // true-ish
j = await handlers.handleNoMergeGet(db, digest, keystore);
assert(v); // true-ish
let o = JSON.parse(j);
o.bar = 'xyzzy';
j = JSON.stringify(o);
let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
assert(signMaterial)
env = Envelope.fromJSON(signMaterial);
console.log('envvvv', env);
const signedData = await signData(env.o['digest'], keystore);
console.log('signed', signedData);
o = {
'm': env,
's': signedData,
}
j = JSON.stringify(o);
console.log(j);
v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
assert(v);
j = await handlers.handleNoMergeGet(db, digest, keystore);
assert(j); // true-ish
o = JSON.parse(j);
console.log(o);
db.close();
});
// await it('server_merge_empty', async () => {
// await it('server_merge', async () => {
// const keystore = await createKeystore();
// const signer = new PGPSigner(keystore);
//
// const db = await createDatabase(__dirname + '/db.three.sqlite');
//
// const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
// let o:any = {
// foo: 'bar',
// xyzzy: 42,
// }
// let j:any = JSON.stringify(o);
// const digest = 'deadbeef';
// let s = new Syncable(digest, {
// bar: 'baz',
// });
// let env = await wrap(s, signer)
// let j:any = env.toJSON();
//
// let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
// assert(v); // true-ish
//
// j = await handlers.handleNoMergeGet(db, digest, keystore);
// assert(v); // true-ish
//
// let o = JSON.parse(j);
// o.bar = 'xyzzy';
// j = JSON.stringify(o);
//
// let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
// assert(signMaterial)
//
// const env = Envelope.fromJSON(signMaterial);
// env = Envelope.fromJSON(signMaterial);
//
// console.log('envvvv', env);
//
@ -301,7 +265,7 @@ describe('server', async () => {
// j = JSON.stringify(o);
// console.log(j);
//
// let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
// v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
// assert(v);
//
// j = await handlers.handleNoMergeGet(db, digest, keystore);
@ -311,5 +275,88 @@ describe('server', async () => {
//
// db.close();
// });
//
await it('server_merge_empty', async () => {
const keystore = await createKeystore();
const signer = new PGPSigner(keystore);
const db = await createDatabase(__dirname + '/db.three.sqlite');
const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
let o:any = {
foo: 'bar',
xyzzy: 42,
}
let j:any = JSON.stringify(o);
let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
assert(signMaterial)
const env = Envelope.fromJSON(signMaterial);
console.log('envvvv', env);
const signedData = await signData(env.o['digest'], keystore);
console.log('signed', signedData);
o = {
'm': env,
's': signedData,
}
j = JSON.stringify(o);
console.log(j);
let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
assert(v);
j = await handlers.handleNoMergeGet(db, digest, keystore);
assert(j); // true-ish
o = JSON.parse(j[0]);
console.log(o);
db.close();
});
await it('immutable_nodigest', async() => {
const keystore = await createKeystore();
const db = await createDatabase(__dirname + '/db.three.sqlite');
const s:string = 'foo';
let r;
r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
assert(r[0]);
assert(hashOfFoo == r[1]);
r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
assert(!r[0]);
assert(hashOfFoo == r[1]);
const b:Uint8Array = new TextEncoder().encode(s);
r = await handlers.handleImmutablePost(b, db, undefined, keystore, 'text/plain');
assert(!r[0]);
assert(hashOfFoo == r[1]);
});
await it('immutable_digest', async() => {
const keystore = await createKeystore();
const db = await createDatabase(__dirname + '/db.three.sqlite');
const s:string = 'foo';
const b:Uint8Array = new TextEncoder().encode(s);
let r;
r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
assert(r[0]);
assert(hashOfFoo == r[1]);
r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
assert(!r[0]);
assert(hashOfFoo == r[1]);
r = await handlers.handleImmutablePost(s, db, hashOfFoo, keystore, 'text/plain');
assert(!r[0]);
assert(hashOfFoo == r[1]);
});
});

View File

@ -9,7 +9,7 @@ import semver
logg = logging.getLogger()
version = (0, 4, 0, 'alpha.10')
version = (0, 4, 0, 'alpha.11')
version_object = semver.VersionInfo(
major=version[0],

View File

@ -1,5 +1,6 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
#RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
RUN apt-get install libffi-dev -y
@ -11,7 +12,9 @@ ARG PIP_INDEX_URL=https://pypi.org/simple
COPY requirements.txt .
RUN pip install --index-url $PIP_INDEX_URL \
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt

View File

@ -1,7 +1,8 @@
confini~=0.4.1a1
confini>=0.3.6rc4,<0.5.0
africastalking==1.2.3
SQLAlchemy==1.3.20
alembic==1.4.2
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
semver==2.13.0

View File

@ -1,6 +1,6 @@
ARG DOCKER_REGISTRY=registry.gitlab.com/grassrootseconomics
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e as dev
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2 as dev
WORKDIR /root
@ -11,10 +11,9 @@ COPY requirements.txt .
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL="https://pypi.org/simple"
RUN pip install --index-url $PIP_INDEX_URL \
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--force-reinstall \
--no-cache \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt

View File

@ -1,7 +1,7 @@
# standard imports
import semver
version = (0, 3, 1, 'alpha.5')
version = (0, 3, 1, 'alpha.6')
version_object = semver.VersionInfo(
major=version[0],

View File

@ -1,8 +1,8 @@
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
RUN apt-get install redis-server libffi-dev -y
RUN apt-get install -y redis-server
# create secrets directory
RUN mkdir -vp pgp/keys
@ -14,13 +14,17 @@ ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple
RUN pip install --index-url $PIP_INDEX_URL \
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a7
COPY *requirements.txt ./
RUN pip install --index-url $PIP_INDEX_URL \
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt
@ -33,7 +37,7 @@ COPY cic_ussd/db/ussd_menu.json data/
COPY docker/*.sh ./
RUN chmod +x /root/*.sh
# copy config and migration files to definitive file so they can be referenced in path definitions for running scripts
## copy config and migration files to definitive file so they can be referenced in path definitions for running scripts
COPY config/ /usr/local/etc/cic-ussd/
COPY cic_ussd/db/migrations/ /usr/local/share/cic-ussd/alembic

View File

@ -4,9 +4,9 @@ billiard==3.6.4.0
bcrypt==3.2.0
celery==4.4.7
cffi==1.14.6
cic-eth~=0.12.4a13
cic-notify~=0.4.0a10
cic-types~=0.2.0a6
cic-eth~=0.12.5a1
cic-notify~=0.4.0a11
cic-types~=0.2.1a2
confini>=0.3.6rc4,<0.5.0
phonenumbers==8.12.12
psycopg2==2.8.6

View File

@ -8,16 +8,20 @@ set -a
set -e
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
if [ ! -z $DEV_FEE_PRICE ]; then
gas_price_arg="--gas-price $DEV_FEE_PRICE"
fee_price_arg="--fee-price $DEV_FEE_PRICE"
fi
must_eth_rpc
# Deploy address declarator registry
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mDeploy address declarator contract\033[;39m"
DEV_ADDRESS_DECLARATOR=`eth-address-declarator-deploy -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG $DEV_DECLARATOR_DESCRIPTION`
DEV_ADDRESS_DECLARATOR=`eth-address-declarator-deploy --nonce $nonce -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG $DEV_DECLARATOR_DESCRIPTION`
check_wait 1
echo -e "\033[;96mWriting env_reset file\033[;39m"
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset
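These migration scripts now read DEV_FEE_PRICE (forwarded to the chainlib tools as --fee-price) instead of the old DEV_ETH_GAS_PRICE. A hedged sketch of a local override before running them (values are only examples):

export DEV_FEE_PRICE=1000000000   # becomes fee_price_arg="--fee-price 1000000000" above
export DEV_TX_WAIT=1              # sets DEV_WAIT_FLAG="-w" in the shared environment script later in this diff
export DEV_DEBUG_LEVEL=3          # enables the debug_rpc node-state dumps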

View File

@ -13,48 +13,60 @@ set -e
must_address "$DEV_ADDRESS_DECLARATOR" "address declarator"
must_eth_rpc
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
if [ ! -z $DEV_FEE_PRICE ]; then
gas_price_arg="--gas-price $DEV_FEE_PRICE"
fee_price_arg="--fee-price $DEV_FEE_PRICE"
fi
# Deploy contract registry contract
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mDeploy contract registry contract\033[;39m"
CIC_REGISTRY_ADDRESS=`okota-contract-registry-deploy $fee_price_arg -i $CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry --identifier DefaultToken --address-declarator $DEV_ADDRESS_DECLARATOR -p $RPC_PROVIDER $DEV_DEBUG_FLAG -s -u -w`
CIC_REGISTRY_ADDRESS=`okota-contract-registry-deploy --nonce $nonce $fee_price_arg -i $CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry --identifier DefaultToken --address-declarator $DEV_ADDRESS_DECLARATOR -p $RPC_PROVIDER $DEV_DEBUG_FLAG -s -u -w`
>&2 echo -e "\033[;96mAdd contract registry record to itself\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier ContractRegistry $CIC_REGISTRY_ADDRESS`
advance_nonce
debug_rpc
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier ContractRegistry $CIC_REGISTRY_ADDRESS`
add_pending_tx_hash $r
>&2 echo -e "\033[;96mAdd address declarator record to contract registry\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier AddressDeclarator $DEV_ADDRESS_DECLARATOR`
advance_nonce
debug_rpc
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier AddressDeclarator $DEV_ADDRESS_DECLARATOR`
add_pending_tx_hash $r
# Deploy transfer authorization contract
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mDeploy transfer authorization contract\033[;39m"
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy $gas_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG`
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy --nonce $nonce -w $gas_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG`
>&2 echo -e "\033[;96mAdd transfer authorization record to contract registry\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS`
advance_nonce
debug_rpc
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS`
add_pending_tx_hash $r
# Deploy token index contract
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mDeploy token symbol index contract\033[;39m"
DEV_TOKEN_INDEX_ADDRESS=`okota-token-index-deploy -s -u $fee_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG --address-declarator $DEV_ADDRESS_DECLARATOR`
DEV_TOKEN_INDEX_ADDRESS=`okota-token-index-deploy --nonce $nonce -s -w -u $fee_price_arg -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --address-declarator $DEV_ADDRESS_DECLARATOR`
>&2 echo -e "\033[;96mAdd token symbol index record to contract registry\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS`
advance_nonce
debug_rpc
r=`eth-contract-registry-set $DEV_WAIT_FLAG $fee_price_arg --nonce $nonce -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS`
add_pending_tx_hash $r
#>&2 echo "add reserve token to token index"
#eth-token-index-add $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG -e $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS
check_wait 2
echo -e "\033[;96mWriting env_reset file\033[;39m"
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset

View File

@ -10,9 +10,9 @@ WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
set -e
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
if [ ! -z $DEV_FEE_PRICE ]; then
gas_price_arg="--gas-price $DEV_FEE_PRICE"
fee_price_arg="--fee-price $DEV_FEE_PRICE"
fi
have_default_token=1
@ -25,16 +25,17 @@ must_eth_rpc
function _deploy_token_defaults {
if [ -z "$TOKEN_SYMBOL" ]; then
>&2 echo -e "\033[;33mtoken symbol not set, setting defaults for type $TOKEN_TYPE\033[;39m"
>&2 echo -e "\033[;33mToken symbol not set, setting defaults for type $TOKEN_TYPE\033[;39m"
TOKEN_SYMBOL=$1
TOKEN_NAME=$2
elif [ -z "$TOKEN_NAME" ]; then
>&2 echo -e "\033[;33mtoken name not set, setting same as symbol for type $TOKEN_TYPE\033[;39m"
>&2 echo -e "\033[;33mToken name not set, setting same as symbol for type $TOKEN_TYPE\033[;39m"
TOKEN_NAME=$TOKEN_SYMBOL
fi
TOKEN_DECIMALS=${TOKEN_DECIMALS:-6}
default_token_registered=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw DefaultToken`
debug_rpc
default_token_registered=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw DefaultToken --fee-limit 8000000`
if [ $default_token_registered == '0000000000000000000000000000000000000000' ]; then
>&2 echo -e "\033[;33mFound no existing default token in token registry"
have_default_token=''
@ -50,23 +51,31 @@ function _deploy_token_defaults {
function deploy_token_giftable_erc20_token() {
_deploy_token_defaults "GFT" "Giftable Token"
TOKEN_ADDRESS=`giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals $TOKEN_DECIMALS $DEV_DEBUG_FLAG`
advance_nonce
debug_rpc
TOKEN_ADDRESS=`giftable-token-deploy --nonce $nonce $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals $TOKEN_DECIMALS $DEV_DEBUG_FLAG`
}
function deploy_token_erc20_demurrage_token() {
_deploy_token_defaults "DET" "Demurrage Token"
TOKEN_ADDRESS=`erc20-demurrage-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL $DEV_DEBUG_FLAG -ww -s`
advance_nonce
debug_rpc
TOKEN_ADDRESS=`erc20-demurrage-token-deploy --nonce $nonce $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL $DEV_DEBUG_FLAG -ww -s`
}
function deploy_accounts_index() {
# Deploy accounts index contract
>&2 echo -e "\033[;96mDeploy accounts index contract for token $TOKEN_SYMBOL\033[;39m"
DEV_ACCOUNTS_INDEX_ADDRESS=`okota-accounts-index-deploy $gas_price_arg -u -s -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --address-declarator $DEV_ADDRESS_DECLARATOR --token-address $1`
advance_nonce
debug_rpc
DEV_ACCOUNTS_INDEX_ADDRESS=`okota-accounts-index-deploy --nonce $nonce $fee_price_arg -u -s -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --address-declarator $DEV_ADDRESS_DECLARATOR --token-address $1`
if [ -z "$have_default_token" ]; then
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mAdd acccounts index record for default token to contract registry\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier AccountRegistry $DEV_ACCOUNTS_INDEX_ADDRESS`
r=`eth-contract-registry-set --nonce $nonce $DEV_WAIT_FLAG $fee_price_arg -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier AccountRegistry $DEV_ACCOUNTS_INDEX_ADDRESS`
add_pending_tx_hash $r
fi
}
@ -75,22 +84,32 @@ function deploy_minter_faucet() {
FAUCET_AMOUNT=${FAUCET_AMOUNT:-0}
# Token faucet contract
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mDeploy token faucet contract for token $TOKEN_SYMBOL\033[;39m"
accounts_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw AccountRegistry`
faucet_address=`sarafu-faucet-deploy $fee_price_arg -s -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG --account-index-address $accounts_index_address $1`
accounts_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw AccountRegistry --fee-limit 8000000`
faucet_address=`sarafu-faucet-deploy --nonce $nonce $fee_price_arg -s -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --account-index-address $accounts_index_address $1`
# sarafu-faucet-deploy consumes TWO nonces
advance_nonce
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mSet token faucet amount to $FAUCET_AMOUNT\033[;39m"
r=`sarafu-faucet-set $fee_price_arg -s -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $faucet_address $DEV_DEBUG_FLAG -s --fee-limit 100000 $FAUCET_AMOUNT`
r=`sarafu-faucet-set --nonce $nonce $fee_price_arg $DEV_WAIT_FLAG -s -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $faucet_address $DEV_DEBUG_FLAG --fee-limit 100000 $FAUCET_AMOUNT`
add_pending_tx_hash $r
if [ -z $have_default_token ]; then
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mRegister faucet in registry\033[;39m"
r=`eth-contract-registry-set -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier Faucet $faucet_address`
r=`eth-contract-registry-set --nonce $nonce $DEV_WAIT_FLAG -s -u $fee_price_arg -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier Faucet $faucet_address`
add_pending_tx_hash $r
fi
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mSet faucet as token minter\033[;39m"
r=`giftable-token-minter -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $TOKEN_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG $faucet_address`
r=`giftable-token-minter $DEV_WAIT_FLAG --nonce $nonce -s -u $fee_price_arg -y $WALLET_KEY_FILE -e $TOKEN_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG $faucet_address`
add_pending_tx_hash $r
}
@ -99,21 +118,26 @@ TOKEN_TYPE=${TOKEN_TYPE:-giftable_erc20_token}
deploy_token_${TOKEN_TYPE}
if [ -z "$have_default_token" ]; then
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mAdd default token to contract registry\033[;39m"
r=`eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier DefaultToken $TOKEN_ADDRESS`
r=`eth-contract-registry-set $DEV_WAIT_FLAG --nonce $nonce $fee_price_arg -s -u -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG --identifier DefaultToken $TOKEN_ADDRESS`
add_pending_tx_hash $r
fi
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mAdd token symbol $TOKEN_SYMBOL to token address $TOKEN_ADDRESS mapping to token index\033[;39m"
token_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS $DEV_DEBUG_FLAG --raw TokenRegistry`
r=`eth-token-index-add $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG -e $token_index_address $TOKEN_ADDRESS`
r=`eth-token-index-add --nonce $nonce $fee_price_arg -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER $DEV_DEBUG_FLAG -e $token_index_address $TOKEN_ADDRESS`
add_pending_tx_hash $r
TOKEN_MINT_AMOUNT=${TOKEN_MINT_AMOUNT:-${DEV_TOKEN_MINT_AMOUNT}}
advance_nonce
debug_rpc
>&2 echo -e "\033[;96mMinting $TOKEN_MINT_AMOUNT tokens\033[;39m"
r=`giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC -u $DEV_DEBUG_FLAG -s -w -e $TOKEN_ADDRESS "$DEV_TOKEN_MINT_AMOUNT"`
r=`giftable-token-gift $DEV_WAIT_FLAG --nonce $nonce $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CHAIN_SPEC -u $DEV_DEBUG_FLAG -s -e $TOKEN_ADDRESS "$DEV_TOKEN_MINT_AMOUNT"`
add_pending_tx_hash $r
@ -128,6 +152,7 @@ else
deploy_minter_${TOKEN_MINTER_MODE} $TOKEN_ADDRESS
fi
check_wait 3
>&2 echo -e "\033[;96mWriting env_reset file\033[;39m"
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset

View File

@ -10,9 +10,9 @@ WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
set -e
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
if [ ! -z $DEV_FEE_PRICE ]; then
gas_price_arg="--gas-price $DEV_FEE_PRICE"
fee_price_arg="--fee-price $DEV_FEE_PRICE"
fi
must_address "$CIC_REGISTRY_ADDRESS" "registry"
@ -42,12 +42,13 @@ add_pending_tx_hash $r
# Transfer gas to custodial gas provider adddress
advance_nonce
>&2 echo -e "\033[;96mGift gas to gas gifter $gas_gifter\033[;39m"
echo "eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG -a $gas_gifter $DEV_GAS_AMOUNT"
r=`eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG -a $gas_gifter $DEV_GAS_AMOUNT`
add_pending_tx_hash $r
>&2 echo -e "\033[;96mgift gas to accounts index owner $accounts_index_writer\033[;39m"
advance_nonce
# for now we are using the same key for both
DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
r=`eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $DEV_DEBUG_FLAG -a $accounts_index_writer $DEV_GAS_AMOUNT`
@ -59,6 +60,7 @@ cic-eth-ctl -vv -i $CHAIN_SPEC unlock INIT
cic-eth-ctl -vv -i $CHAIN_SPEC unlock SEND
cic-eth-ctl -vv -i $CHAIN_SPEC unlock QUEUE
check_wait 4
>&2 echo -e "\033[;96mWriting env_reset file\033[;39m"
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset

View File

@ -10,9 +10,9 @@ WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
set -e
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
if [ ! -z $DEV_FEE_PRICE ]; then
gas_price_arg="--gas-price $DEV_FEE_PRICE"
fee_price_arg="--fee-price $DEV_FEE_PRICE"
fi
must_address "$CIC_REGISTRY_ADDRESS" "registry"

View File

@ -8,6 +8,16 @@ else
mkdir -p $DEV_DATA_DIR
fi
# Handle wallet
export WALLET_KEY_FILE=${WALLET_KEY_FILE:-`realpath ./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c`}
if [ ! -f $WALLET_KEY_FILE ]; then
>&2 echo "wallet path '$WALLET_KEY_FILE' does not point to a file"
exit 1
fi
export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-keyfile -z -d $WALLET_KEY_FILE`
noncefile=${DEV_DATA_DIR}/nonce_${DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
# By default configuration values generated from previous runs will be used in subsequent invocations
# Setting the config reset
if [ -z $DEV_CONFIG_RESET ]; then
@ -17,18 +27,15 @@ if [ -z $DEV_CONFIG_RESET ]; then
fi
else
>&2 echo -e "\033[;33mGenerating scratch configuration\033[;39m"
bash_debug_flag=""
if [ "$DEV_DEBUG_LEVEL" -gt 1 ]; then
bash_debug_flag="-v"
fi
rm $bash_debug_flag -f ${DEV_DATA_DIR}/env_reset
rm $bash_debug_flag -f $noncefile
confini-dump --schema-dir ./config --prefix export > ${DEV_DATA_DIR}/env_reset
fi
# Handle wallet
export WALLET_KEY_FILE=${WALLET_KEY_FILE:-`realpath ./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c`}
if [ ! -f $WALLET_KEY_FILE ]; then
>&2 echo "wallet path '$WALLET_KEY_FILE' does not point to a file"
exit 1
fi
#export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-checksum $(cat $WALLET_KEY_FILE | jq -r .address)`
export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-keyfile -z -d $WALLET_KEY_FILE`
# Wallet dependent variable defaults
export DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
export DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
@ -36,6 +43,14 @@ export CIC_TRUST_ADDRESS=${CIC_TRUST_ADDRESS:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL
export TOKEN_SINK_ADDRESS=${TOKEN_SINK_ADDRESS:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
if [ ! -f $noncefile ]; then
nonce=`eth-count -p $RPC_PROVIDER $DEV_DEBUG_FLAG $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER`
>&2 echo -e "\033[;96mUsing contract deployer address $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER with nonce $nonce\033[;39m"
echo -n $nonce > $noncefile
else
nonce=`cat $noncefile`
>&2 echo -e "\033[;96mResuming usage with contract deployer address $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER with nonce $nonce\033[;39m"
fi
# Migration variable processing
confini-dump --schema-dir ./config > ${DEV_DATA_DIR}/env_reset

View File

@ -2,7 +2,8 @@
eth_account_contract_deployer =
token_mint_amount = 10000000000000000000000000000000000
gas_amount = 100000000000000000000000
eth_gas_price =
fee_limit_call =
fee_price =
data_dir =
address_declarator =
declarator_description = 0x546869732069732074686520434943206e6574776f726b000000000000000000

View File

@ -1,6 +1,6 @@
ARG DEV_DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
FROM $DEV_DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-55da5f4e
FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
WORKDIR /root
@ -12,13 +12,10 @@ RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 2A518C819BE37D2C20
RUN mkdir -vp /usr/local/etc/cic
ENV CONFINI_DIR /usr/local/etc/cic/
COPY config_template/ /usr/local/etc/cic/
COPY requirements.txt .
RUN apt-get install libffi-dev
#RUN apt-get install libffi-dev
ARG pip_index_url=https://pypi.org/simple
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
@ -27,10 +24,19 @@ ARG PIP_INDEX_URL="https://pypi.org/simple"
ARG pip_trusted_host=pypi.org
RUN pip install --index-url $PIP_INDEX_URL \
--pre \
--force-reinstall \
--no-cache \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt
COPY override_requirements.txt .
RUN pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
--force-reinstall \
--no-cache \
-r override_requirements.txt
RUN pip freeze
COPY . .
RUN chmod +x *.sh

View File

@ -0,0 +1 @@
chainlib-eth==0.0.10a15

View File

@ -1,6 +1,5 @@
cic-eth[tools]==0.12.4a13
chainlib-eth>=0.0.10a5,<0.1.0
chainlib==0.0.10a3,<0.1.0
chainlib-eth>=0.0.10a15,<0.1.0
eth-erc20>=0.1.2a3,<0.2.0
erc20-demurrage-token>=0.0.5a2,<0.1.0
eth-address-index>=0.2.4a1,<0.3.0
@ -9,6 +8,5 @@ erc20-transfer-authorization>=0.3.5a2,<0.4.0
erc20-faucet>=0.3.2a2,<0.4.0
sarafu-faucet>=0.0.7a2,<0.1.0
confini>=0.4.2rc3,<1.0.0
crypto-dev-signer>=0.4.15rc2,<=0.4.15
eth-token-index>=0.2.4a1,<=0.3.0
okota>=0.2.4a15,<0.3.0

View File

@ -4,13 +4,18 @@
set -a
DEV_DEBUG_FLAG=""
DEV_DEBUG_LEVEL=${DEV_DEBUG_LEVEL=0}
if [ $DEV_DEBUG_LEVEL -eq 1 ]; then
DEV_DEBUG_LEVEL=${DEV_DEBUG_LEVEL:-0}
if [ "$DEV_DEBUG_LEVEL" -eq 1 ]; then
DEV_DEBUG_FLAG="-v"
elif [ $DEV_DEBUG_LEVEL -gt 1 ]; then
elif [ "$DEV_DEBUG_LEVEL" -gt 1 ]; then
DEV_DEBUG_FLAG="-vv"
fi
DEV_WAIT_FLAG=""
if [ ! -z "$DEV_TX_WAIT" ]; then
DEV_WAIT_FLAG="-w"
fi
# disable override of config schema directory
unset CONFINI_DIR
@ -34,6 +39,14 @@ confini-dump --schema-dir ./config
clear_pending_tx_hashes
RUN_MASK_HIGHEST=0
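# find the highest set bit in RUN_MASK, e.g. RUN_MASK=6 (binary 110) gives RUN_MASK_HIGHEST=3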
for ((i=$LAST_BIT_POS; i>0; i--)); do
b=$((2**$((i-1))))
if [ $((b & $RUN_MASK)) -gt 0 ]; then
RUN_MASK_HIGHEST=$i
break
fi
done
bit=1
for ((i=0; i<$LAST_BIT_POS; i++)); do

View File

@ -32,11 +32,39 @@ function must_eth_rpc() {
function clear_pending_tx_hashes() {
truncate -s 0 $DEV_DATA_DIR/hashes
>&2 echo -e "\033[;96mClearing pending hashes\033[;39m"
truncate -s 0 ${DEV_DATA_DIR}/hashes
}
function add_pending_tx_hash() {
must_hash_256 $1
echo $1 >> $DEV_DATA_DIR/hashes
echo $1 >> ${DEV_DATA_DIR}/hashes
}
function advance_nonce() {
nonce=`cat $noncefile`
next_nonce=$((nonce+1))
echo -n $next_nonce > $noncefile
if [ "$DEV_DEBUG_LEVEL" -gt 1 ]; then
>&2 echo retrieved nonce $nonce
fi
}
function debug_rpc() {
if [ "$DEV_DEBUG_LEVEL" -gt 2 ]; then
>&2 echo -e "\033[;35mRPC Node state\033[;39m"
>&2 eth-info --local -p $RPC_PROVIDER
fi
}
function check_wait() {
#if [ "$1" -eq "$RUN_MASK_HIGHEST" ]; then
>&2 echo -e "\033[;96mCatch up with paralell transactions\033[;39m"
if [ "$DEV_DEBUG_LEVEL" -gt "0" ]; then
>&2 cat ${DEV_DATA_DIR}/hashes
fi
eth-wait $DEV_DEBUG_FLAG -p $RPC_PROVIDER ${DEV_DATA_DIR}/hashes
clear_pending_tx_hashes
#fi
}
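The deploy scripts above use these helpers in a fixed rhythm; a condensed sketch of the pattern (the actual tool invocation is elided):

advance_nonce            # reads the reserved nonce into $nonce and writes nonce+1 back to $noncefile
debug_rpc                # optional node-state dump when DEV_DEBUG_LEVEL > 2
# ...run one chainlib tool with --nonce $nonce and capture its tx hash in r...
add_pending_tx_hash $r   # collect the hash so a later check_wait can block on it
check_wait 1             # eth-wait on every collected hash, then clear the list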

View File

@ -65,7 +65,7 @@ args_override = {
'REDIS_DB': getattr(args, 'redis_db'),
'META_HOST': getattr(args, 'meta_host'),
'META_PORT': getattr(args, 'meta_port'),
'KEYSTORE_FILE_PATH': getattr(args, 'y')
'WALLET_KEY_FILE': getattr(args, 'y')
}
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')

View File

@ -72,7 +72,7 @@ config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug(f'config loaded from {args.c}:\n{config}')
dirs = initialize_dirs(args.import_dir, force_reset=args.f)
dirs = initialize_dirs(args.import_dir)
valid_service_codes = config.get('USSD_SERVICE_CODE').split(",")

View File

@ -12,11 +12,13 @@ def initialize_dirs(user_dir, force_reset=False):
dirs['new'] = os.path.join(user_dir, 'new')
dirs['meta'] = os.path.join(user_dir, 'meta')
dirs['custom'] = os.path.join(user_dir, 'custom')
dirs['phone'] = os.path.join(user_dir, 'phone')
dirs['preferences'] = os.path.join(user_dir, 'preferences')
dirs['txs'] = os.path.join(user_dir, 'txs')
dirs['keyfile'] = os.path.join(user_dir, 'keystore')
dirs['custom_new'] = os.path.join(dirs['custom'], 'new')
dirs['custom_meta'] = os.path.join(dirs['custom'], 'meta')
dirs['phone_meta'] = os.path.join(dirs['phone'], 'meta')
dirs['preferences_meta'] = os.path.join(dirs['preferences'], 'meta')
dirs['preferences_new'] = os.path.join(dirs['preferences'], 'new')
@ -37,6 +39,6 @@ def initialize_dirs(user_dir, force_reset=False):
for d in dirs.keys():
if d == 'old':
continue
os.makedirs(dirs[d])
os.makedirs(dirs[d], exist_ok=True)
return dirs

View File

@ -1,2 +1,2 @@
[rpc]
provider =
provider = http://localhost:63545

View File

@ -37,11 +37,11 @@ if [ "$INCLUDE_BALANCES" != "y" ]
then
echo -e "\033[;96mRunning worker without opening balance transactions\033[;96m"
TARGET_TX_COUNT=$NUMBER_OF_USERS
nohup python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$OUT_DIR" > nohup.out 2> nohup.err < /dev/null &
nohup python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --token-symbol "$TOKEN_SYMBOL" -y "$WALLET_KEY_FILE" "$OUT_DIR" > nohup.out 2> nohup.err < /dev/null &
else
echo -e "\033[;96mRunning worker with opening balance transactions\033[;96m"
TARGET_TX_COUNT=$((NUMBER_OF_USERS*2))
nohup python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --include-balances --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$OUT_DIR" &
nohup python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --include-balances --token-symbol "$TOKEN_SYMBOL" -y "$WALLET_KEY_FILE" "$OUT_DIR" &
fi
echo -e "\033[;96mTarget count set to ${TARGET_TX_COUNT}"

View File

@ -8,7 +8,7 @@ volumes:
services:
evm:
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics/cic-internal-integration}/bloxberg-node:${TAG:-latest}
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}/bloxberg-node:${TAG:-latest}
build:
context: apps/bloxbergValidatorSetup
restart: unless-stopped
@ -39,7 +39,7 @@ services:
command: "--loglevel verbose"
bootstrap:
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics/cic-internal-integration}/contract-migration:${TAG:-latest}
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}/contract-migration:${TAG:-latest}
build:
context: apps/contract-migration
dockerfile: docker/Dockerfile
@ -51,6 +51,11 @@ services:
environment:
DEV_DATA_DIR: ${DEV_DATA_DIR:-/tmp/cic/config}
DEV_CONFIG_RESET: $DEV_CONFIG_RESET
DEV_FEE_PRICE: $DEV_FEE_PRICE
DEV_FEE_LIMIT_CALL: ${DEV_FEE_LIMIT_CALL:-8000000}
DEV_DEBUG_LEVEL: ${DEV_DEBUG_LEVEL:-0}
DEV_TX_WAIT: $DEV_TX_WAIT
DEV_GAS_AMOUNT: $DEV_GAS_AMOUNT
RPC_PROVIDER: ${RPC_PROVIDER:-http://evm:8545}
CHAIN_SPEC: ${CHAIN_SPEC:-evm:byzantium:8996:bloxberg}
REDIS_HOST: ${REDIS_HOST:-redis}
@ -70,6 +75,7 @@ services:
REDIS_HOST_CALLBACK: ${REDIS_HOST_CALLBACK:-redis}
REDIS_PORT_CALLBACK: ${REDIS_PORT_CALLBACK:-6379}
FAUCET_AMOUNT: ${FAUCET_AMOUNT:-0}
WALLET_KEY_FILE: ${WALLET_KEY_FILE:-/root/keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c}
command: ["./run_job.sh"]
depends_on:
- evm
@ -433,13 +439,14 @@ services:
# metadata replacement server for swarm
cic-meta-server:
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics/cic-internal-integration}/cic-meta:${TAG:-latest}
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}/cic-meta:${TAG:-latest}
hostname: meta
build:
context: apps/cic-meta
dockerfile: docker/Dockerfile
args:
DOCKER_REGISTRY: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}
NPM_REPOSITORY: ${DEV_NPM_REPOSITORY:-https://registry.npmjs.org}
environment:
DATABASE_HOST: ${DATABASE_HOST:-postgres}
DATABASE_PORT: ${DATABASE_PORT:-5432}
@ -469,7 +476,7 @@ services:
- ./apps/contract-migration/testdata/pgp/:/tmp/cic/pgp
cic-user-tasker:
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics/cic-internal-integration}/cic-user:${TAG:-latest}
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}/cic-user:${TAG:-latest}
build:
context: apps/cic-ussd
dockerfile: docker/Dockerfile
@ -504,7 +511,7 @@ services:
command: "/root/start_cic_user_tasker.sh -q cic-ussd -vv"
cic-user-server:
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics/cic-internal-integration}/cic-user:${TAG:-latest}
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}/cic-user:${TAG:-latest}
build:
context: apps/cic-ussd
dockerfile: docker/Dockerfile
@ -532,7 +539,7 @@ services:
command: "/root/start_cic_user_server.sh -vv"
cic-user-ussd-server:
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics/cic-internal-integration}/cic-user:${TAG:-latest}
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}/cic-user:${TAG:-latest}
build:
context: apps/cic-ussd
dockerfile: docker/Dockerfile
@ -569,7 +576,7 @@ services:
command: "/root/start_cic_user_ussd_server.sh -vv"
cic-notify-tasker:
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics/cic-internal-integration}/cic-notify:${TAG:-latest}
image: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}/cic-notify:${TAG:-latest}
build:
context: apps/cic-notify
dockerfile: docker/Dockerfile

View File

@ -6,4 +6,7 @@ set -e
TAG=${TAG?Variable not set} \
docker-compose \
-f docker-compose.yml \
build
build \
--no-cache \
--parallel \
--progress plain
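A TAG must be exported before the build wrapper scripts are called; the build-push CI job above shows the intended invocation pattern:

TAG=latest ./scripts/build-push.sh
TAG=$(cat ./version) ./scripts/build-push.sh   # uses the version artifact produced by the version job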