Merge branch 'master' of gitlab.com:grassrootseconomics/cic-internal-integration into philip/multi-token-v1

This commit is contained in:
PhilipWafula 2021-10-27 11:53:53 +03:00
commit d2b811c124
Signed by untrusted user: mango-habanero
GPG Key ID: B00CE9034DA19FB7
23 changed files with 5944 additions and 152 deletions

View File

@@ -1,4 +1,3 @@
-@node cic-eth-accounts
 @section Accounts
 Accounts are private keys in the signer component keyed by "addresses", a one-way transformation of a public key. Data can be signed by using the account as the identifier for the corresponding RPC requests.
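To make the signing pattern above concrete, a client would reference the account only by its address when asking the signer to sign data; the private key itself never leaves the signer. The sketch below is purely illustrative — the method name, parameters, and example address are assumptions, not the signer's documented API.

```typescript
// Hypothetical JSON-RPC request to the signer: the account is referenced
// only by its address; the corresponding private key stays inside the signer.
// Method name, params shape, and the example address are illustrative assumptions.
const signRequest = {
  jsonrpc: '2.0',
  id: 1,
  method: 'personal_sign',                        // assumed method name
  params: [
    '0xdeadbeef',                                 // data to sign
    '0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C', // account address = key identifier
  ],
};

// e.g. POST this as JSON to the signer's RPC socket and read the signature
// back from the 'result' field of the response.
console.debug(JSON.stringify(signRequest));
```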

View File

@@ -1,4 +1,4 @@
-@node cic-eth system maintenance
+@anchor{cic-eth-appendix-system-maintenance}
 @appendix Admin API
 The admin API is still in an early stage of refinement. User friendliness can be considerably improved.
@@ -33,7 +33,7 @@ Get the current state of a lock
 @appendixsection tag_account
-Associate an identifier with an account address (@xref{cic-eth system accounts})
+Associate an identifier with an account address (@xref{cic-eth-system-accounts})
 @appendixsection have_account

View File

@@ -14,5 +14,6 @@ Released 2021 under GPL3
 @c
 @contents
-@include index.texi
+@include content.texi
+@include appendix.texi

View File

@@ -0,0 +1,3 @@
+@include admin.texi
+@include chains.texi
+@include transfertypes.texi

View File

@@ -1,4 +1,4 @@
-@node cic-eth Appendix Task chains
+@anchor{cic-eth-appendix-task-chains}
 @appendix Task chains
 TBC - explain here how to generate these chain diagrams

View File

@@ -1,4 +1,3 @@
-@node cic-eth configuration
 @section Configuration
 Configuration parameters are grouped by configuration filename.

View File

@@ -1,6 +1,6 @@
-@node cic-eth
 @top cic-eth
+@include intro.texi
 @include dependencies.texi
 @include configuration.texi
 @include system.texi
@@ -9,6 +9,3 @@
 @include incoming.texi
 @include services.texi
 @include tools.texi
-@include admin.texi
-@include chains.texi
-@include transfertypes.texi

View File

@@ -1,4 +1,3 @@
-@node cic-eth-dependencies
 @section Dependencies
 This application is written in Python 3.8. It is tightly coupled with @code{python-celery}, which provides the task worker ecosystem. It also uses @code{SQLAlchemy}, which provides useful abstractions for persistent storage through SQL, and @code{alembic} for database schema migrations.

View File

@@ -1,4 +1,4 @@
-@node cic-eth-incoming
+@anchor{cic-eth-incoming}
 @section Incoming transactions
 All transactions in mined blocks will be passed to a selection of plugin filters by the @code{chainsyncer} component. Each of these filters is an individual Python module file in @code{cic_eth.runnable.daemons.filters}. This section describes their function.

View File

@@ -1,9 +1,8 @@
-@node cic-eth-interacting
 @section Interacting with the system
 
-The API to the @var{cic-eth} component is a proxy for executing @emph{chains of Celery tasks}. The tasks that compose individual chains are documented in @ref{cic-eth Appendix Task chains,the Task Chain appendix}, which also describes a CLI tool that can generate graph representations of them.
+The API to the @var{cic-eth} component is a proxy for executing @emph{chains of Celery tasks}. The tasks that compose individual chains are documented in @ref{cic-eth-appendix-task-chains,the Task Chain appendix}, which also describes a CLI tool that can generate graph representations of them.
 
-There are two API classes, @var{Api} and @var{AdminApi}. The former is described later in this section, the latter in @ref{cic-eth system maintenance,the Admin API appendix}.
+There are two API classes, @var{Api} and @var{AdminApi}. The former is described later in this section, the latter in @ref{cic-eth-appendix-system-maintenance,the Admin API appendix}.
 
 @subsection Interface

View File

@@ -1,4 +1,3 @@
-@node cic-eth-outgoing
 @section Outgoing transactions
 @strong{Important! A prerequisite for proper functioning of the component is that no other agent is sending transactions to the network for any of the keys in the keystore.}

View File

@@ -1,4 +1,3 @@
-@node cic-eth-services
 @section Services
 There are four daemons that together orchestrate all of the aforementioned recipes. This section provides a high-level description of them.

View File

@@ -1,10 +1,10 @@
-@node cic-eth system accounts
 @section System initialization
 When the system starts for the first time, it is locked for any state change request other than account creation@footnote{Specifically, the @code{INIT}, @code{SEND} and @code{QUEUE} lock bits are set.}. These locks should be @emph{reset} once system initialization has been completed. Currently, system initialization only involves creating and tagging required system accounts, as specified below.
 See @ref{cic-eth-locking,Locking} and @ref{cic-eth-tools-ctrl,ctrl in Tools} for details on locking.
+@anchor{cic-eth-system-accounts}
 @subsection System accounts
 Certain accounts in the system have special roles. These are defined by @emph{tagging} certain account addresses with well-known identifiers.

View File

@@ -1,4 +1,3 @@
-@node cic-eth-tools
 @section Tools
 A collection of CLI tools has been provided to help with diagnostics and other administrative tasks. These use the same configuration infrastructure as the daemons.
@@ -37,7 +36,7 @@ Execute a token transfer on behalf of a custodial account.
 @subsection tag (cic-eth-tag)
-Associate an account address with a string identifier. @xref{cic-eth system accounts}
+Associate an account address with a string identifier. @xref{cic-eth-system-accounts}
 @anchor{cic-eth-tools-ctrl}

View File

@@ -1,4 +1,3 @@
-@node cic-eth Appendix Transaction types
 @appendix Transfer types
 @table @var

View File

@@ -5,15 +5,19 @@ WORKDIR /root
 RUN apk add --no-cache postgresql bash
 
+ARG NPM_REPOSITORY=${NPM_REPOSITORY:-https://registry.npmjs.org}
+RUN npm config set snyk=false
+#RUN npm config set registry={NPM_REPOSITORY}
+RUN npm config set registry=${NPM_REPOSITORY}
+
 # copy the dependencies
 COPY package.json package-lock.json ./
-#RUN --mount=type=cache,mode=0755,target=/root/.npm \
-RUN npm set cache /root/.npm && \
+RUN --mount=type=cache,mode=0755,target=/root/.npm \
+    npm set cache /root/.npm && \
+    npm cache verify && \
     npm ci --verbose
 
-COPY webpack.config.js .
-COPY tsconfig.json .
+COPY webpack.config.js ./
+COPY tsconfig.json ./
 ## required to build the cic-client-meta module
 COPY . .
 COPY tests/*.asc /root/pgp/

File diff suppressed because it is too large

View File

@@ -1,8 +1,9 @@
 create table if not exists store (
 	id serial primary key not null,
-	owner_fingerprint text not null,
+	owner_fingerprint text default null,
 	hash char(64) not null unique,
-	content text not null
+	content text not null,
+	mime_type text
 );
 create index if not exists idx_fp on store ((lower(owner_fingerprint)));

View File

@@ -1,9 +1,10 @@
 create table if not exists store (
 	/*id serial primary key not null,*/
 	id integer primary key autoincrement,
-	owner_fingerprint text not null,
+	owner_fingerprint text default null,
 	hash char(64) not null unique,
-	content text not null
+	content text not null,
+	mime_type text
 );
 create index if not exists idx_fp on store ((lower(owner_fingerprint)));
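The relaxed schema (identical in the Postgres and SQLite variants) distinguishes two row shapes: mutable Automerge records keep an owner_fingerprint, while content-addressed immutable blobs leave it NULL and may carry an explicit mime_type. A minimal sketch of the two insert shapes, assuming a node-postgres Client and placeholder values:

```typescript
// Sketch only: illustrates the two row shapes the new schema allows.
// 'db' is assumed to be a node-postgres Client; all values are placeholders.
import { Client } from 'pg';

async function insertExamples(db: Client): Promise<void> {
  // Mutable CRDT record: owned by a PGP key, content is an encrypted Automerge doc.
  await db.query(
    'INSERT INTO store (owner_fingerprint, hash, content) VALUES ($1, $2, $3)',
    ['DEADBEEF00FINGERPRINT', 'ab'.repeat(32), '-----BEGIN PGP MESSAGE-----...'],
  );
  // Immutable blob: no owner fingerprint, addressed purely by its sha256 digest,
  // with the declared MIME type stored alongside.
  await db.query(
    'INSERT INTO store (hash, content, mime_type) VALUES ($1, $2, $3)',
    ['cd'.repeat(32), '-----BEGIN PGP MESSAGE-----...', 'text/plain'],
  );
}
```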

View File

@@ -1,12 +1,13 @@
 import * as Automerge from 'automerge';
 import * as pgp from 'openpgp';
-import { Envelope, Syncable } from '@cicnet/crdt-meta';
+import * as crypto from 'crypto';
+import { Envelope, Syncable, bytesToHex } from '@cicnet/crdt-meta';
 
 function handleNoMergeGet(db, digest, keystore) {
-	const sql = "SELECT content FROM store WHERE hash = '" + digest + "'";
-	return new Promise<string|boolean>((whohoo, doh) => {
+	const sql = "SELECT owner_fingerprint, content, mime_type FROM store WHERE hash = '" + digest + "'";
+	return new Promise<any>((whohoo, doh) => {
 		db.query(sql, (e, rs) => {
 			if (e !== null && e !== undefined) {
 				doh(e);
@@ -16,16 +17,36 @@ function handleNoMergeGet(db, digest, keystore) {
 				return;
 			}
+			const immutable = rs.rows[0]['owner_fingerprint'] == undefined;
+			let mimeType;
+			if (immutable) {
+				if (rs.rows[0]['mime_type'] === undefined) {
+					mimeType = 'application/octet-stream';
+				} else {
+					mimeType = rs.rows[0]['mime_type'];
+				}
+			} else {
+				mimeType = 'application/json';
+			}
 			const cipherText = rs.rows[0]['content'];
 			pgp.message.readArmored(cipherText).then((m) => {
 				const opts = {
 					message: m,
 					privateKeys: [keystore.getPrivateKey()],
+					format: 'binary',
 				};
 				pgp.decrypt(opts).then((plainText) => {
-					const o = Syncable.fromJSON(plainText.data);
-					const r = JSON.stringify(o.m['data']);
-					whohoo(r);
+					let r;
+					if (immutable) {
+						r = plainText.data;
+					} else {
+						mimeType = 'application/json';
+						const d = new TextDecoder().decode(plainText.data);
+						const o = Syncable.fromJSON(d);
+						r = JSON.stringify(o.m['data']);
+					}
+					whohoo([r, mimeType]);
 				}).catch((e) => {
 					console.error('decrypt', e);
 					doh(e);
@@ -57,6 +78,7 @@ function handleServerMergePost(data, db, digest, keystore, signer) {
 			} else {
 				e = Envelope.fromJSON(v);
 				s = e.unwrap();
+				console.debug('s', s, o)
 				s.replace(o, 'server merge');
 				e.set(s);
 				s.onwrap = (e) => {
@@ -139,7 +161,13 @@ function handleClientMergeGet(db, digest, keystore) {
 				privateKeys: [keystore.getPrivateKey()],
 			};
 			pgp.decrypt(opts).then((plainText) => {
-				const o = Syncable.fromJSON(plainText.data);
+				let d;
+				if (typeof(plainText.data) == 'string') {
+					d = plainText.data;
+				} else {
+					d = new TextDecoder().decode(plainText.data);
+				}
+				const o = Syncable.fromJSON(d);
 				const e = new Envelope(o);
 				whohoo(e.toJSON());
 			}).catch((e) => {
@@ -201,10 +229,65 @@ function handleClientMergePut(data, db, digest, keystore, signer) {
 	});
 }
 
+function handleImmutablePost(data, db, digest, keystore, contentType) {
+	return new Promise<Array<string|boolean>>((whohoo, doh) => {
+		let data_binary = data;
+		const h = crypto.createHash('sha256');
+		h.update(data_binary);
+		const z = h.digest();
+		const r = bytesToHex(z);
+		if (digest) {
+			if (r != digest) {
+				doh('hash mismatch: ' + r + ' != ' + digest);
+				return;
+			}
+		} else {
+			digest = r;
+			console.debug('calculated digest ' + digest);
+		}
+		handleNoMergeGet(db, digest, keystore).then((haveDigest) => {
+			if (haveDigest !== false) {
+				whohoo([false, digest]);
+				return;
+			}
+			let message;
+			if (typeof(data) == 'string') {
+				data_binary = new TextEncoder().encode(data);
+				message = pgp.message.fromText(data);
+			} else {
+				message = pgp.message.fromBinary(data);
+			}
+			const opts = {
+				message: message,
+				publicKeys: keystore.getEncryptKeys(),
+			};
+			pgp.encrypt(opts).then((cipherText) => {
+				const sql = "INSERT INTO store (hash, content, mime_type) VALUES ('" + digest + "', '" + cipherText.data + "', '" + contentType + "') ON CONFLICT (hash) DO UPDATE SET content = EXCLUDED.content;";
+				db.query(sql, (e, rs) => {
+					if (e !== null && e !== undefined) {
+						doh(e);
+						return;
+					}
+					whohoo([true, digest]);
+				});
+			}).catch((e) => {
+				doh(e);
+			});
+		}).catch((e) => {
+			doh(e);
+		});
+	});
+}
+
 export {
 	handleClientMergePut,
 	handleClientMergeGet,
 	handleServerMergePost,
 	handleServerMergePut,
 	handleNoMergeGet,
+	handleImmutablePost,
 };
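To make the new return shapes concrete: handleNoMergeGet now resolves to a [content, mimeType] pair (or false when the digest is unknown), and handleImmutablePost resolves to a [created, digest] pair. A hedged usage sketch, assuming a db connection and PGPKeyStore wired up as in the test suite:

```typescript
// Illustrative only: 'db' and 'keystore' are assumed to be configured as in
// the tests (SQLite adapter + PGPKeyStore from '@cicnet/crdt-meta').
import * as handlers from '../scripts/server/handlers';

async function demo(db, keystore): Promise<void> {
  // Store an immutable blob; with no digest given, the handler derives
  // the sha256 content address itself.
  const [created, digest] = await handlers.handleImmutablePost(
    'hello world', db, undefined, keystore, 'text/plain');
  console.debug(created ? 'stored new blob' : 'blob already existed', digest);

  // Fetch it back: a [content, mimeType] tuple, or false if unknown.
  const r = await handlers.handleNoMergeGet(db, digest, keystore);
  if (r === false) {
    throw new Error('digest not found: ' + digest);
  }
  const [content, mimeType] = r;
  console.debug('got', mimeType, 'payload of length', content.length);
}
```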

View File

@@ -118,37 +118,71 @@ async function processRequest(req, res) {
 		return;
 	}
 
+	let mod = req.method.toLowerCase() + ":automerge:";
+	let modDetail = undefined;
+	let immutablePost = false;
 	try {
 		digest = parseDigest(req.url);
 	} catch(e) {
-		console.error('digest error: ' + e)
-		res.writeHead(400, {"Content-Type": "text/plain"});
-		res.end();
-		return;
+		if (req.url == '/') {
+			immutablePost = true;
+			modDetail = 'immutable';
+		} else {
+			console.error('url is not empty (' + req.url + ') and not valid digest error: ' + e)
+			res.writeHead(400, {"Content-Type": "text/plain"});
+			res.end();
+			return;
+		}
 	}
 
-	const mergeHeader = req.headers['x-cic-automerge'];
-	let mod = req.method.toLowerCase() + ":automerge:";
-	switch (mergeHeader) {
-		case "client":
-			mod += "client"; // client handles merges
-			break;
-		case "server":
-			mod += "server"; // server handles merges
-			break;
-		default:
-			mod += "none"; // merged object only (get only)
-	}
+	if (modDetail === undefined) {
+		const mergeHeader = req.headers['x-cic-automerge'];
+		switch (mergeHeader) {
+			case "client":
+				if (immutablePost) {
+					res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
+					res.end();
+					return;
+				}
+				modDetail = "client"; // client handles merges
+				break;
+			case "server":
+				if (immutablePost) {
+					res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
+					res.end();
+					return;
+				}
+				modDetail = "server"; // server handles merges
+				break;
+			case "immutable":
+				modDetail = "immutable"; // no merging, literal immutable content with content-addressing
+				break;
+			default:
+				modDetail = "none"; // merged object only (get only)
+		}
+	}
+	mod += modDetail;
 
-	let data = '';
+	// handle bigger chunks of data
+	let data;
 	req.on('data', (d) => {
-		data += d;
+		if (data === undefined) {
+			data = d;
+		} else {
+			data += d;
+		}
 	});
 
-	req.on('end', async () => {
-		console.debug('mode', mod);
-		let content = '';
+	req.on('end', async (d) => {
+		let inputContentType = req.headers['content-type'];
+		let debugString = 'executing mode ' + mod;
+		if (data !== undefined) {
+			debugString += ' for content type ' + inputContentType + ' length ' + data.length;
+		}
+		console.debug(debugString);
+		let content;
 		let contentType = 'application/json';
-		console.debug('handling data', data);
+		let statusCode = 200;
 		let r:any = undefined;
 		try {
 			switch (mod) {
@@ -176,6 +210,7 @@ async function processRequest(req, res) {
 					res.end();
 					return;
 				}
+				content = '';
 				break;
 			//case 'get:automerge:server':
 			//	content = await handlers.handleServerMergeGet(db, digest, keystore);
@@ -183,12 +218,24 @@ async function processRequest(req, res) {
 			case 'get:automerge:none':
 				r = await handlers.handleNoMergeGet(db, digest, keystore);
-				if (r == false) {
+				if (r === false) {
 					res.writeHead(404, {"Content-Type": "text/plain"});
 					res.end();
 					return;
 				}
-				content = r;
+				content = r[0];
+				contentType = r[1];
+				break;
+
+			case 'post:automerge:immutable':
+				if (inputContentType === undefined) {
+					inputContentType = 'application/octet-stream';
+				}
+				r = await handlers.handleImmutablePost(data, db, digest, keystore, inputContentType);
+				if (r[0]) {
+					statusCode = 201;
+				}
+				content = r[1];
 				break;
 
 			default:
@@ -210,8 +257,15 @@ async function processRequest(req, res) {
 		return;
 	}
 
-	const responseContentLength = (new TextEncoder().encode(content)).length;
-	res.writeHead(200, {
+	//let responseContentLength;
+	//if (typeof(content) == 'string') {
+	//	(new TextEncoder().encode(content)).length;
+	//}
+	const responseContentLength = content.length;
+	//if (responseContentLength === undefined) {
+	//	responseContentLength = 0;
+	//}
+	res.writeHead(statusCode, {
 		"Access-Control-Allow-Origin": "*",
 		"Content-Type": contentType,
 		"Content-Length": responseContentLength,

View File

@@ -7,6 +7,8 @@ import * as handlers from '../scripts/server/handlers';
 import { Envelope, Syncable, ArgPair, PGPKeyStore, PGPSigner, KeyStore, Signer } from '@cicnet/crdt-meta';
 import { SqliteAdapter } from '../src/db';
 
+const hashOfFoo = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';
+
 function createKeystore() {
 	const pksa = fs.readFileSync(__dirname + '/privatekeys.asc', 'utf-8');
 	const pubksa = fs.readFileSync(__dirname + '/publickeys.asc', 'utf-8');
@@ -44,11 +46,13 @@ function createDatabase(sqlite_file:string):Promise<any> {
 //			doh(e);
 //			return;
 //		}
+		// get this from real sql files sources
 		const sql = `CREATE TABLE store (
 			id integer primary key autoincrement,
-			owner_fingerprint text not null,
+			owner_fingerprint text default null,
 			hash char(64) not null unique,
-			content text not null
+			content text not null,
+			mime_type text default null
 		);
 		`
@@ -111,15 +115,18 @@ describe('server', async () => {
 		let j = env.toJSON();
 		const content = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
 		assert(content); // true-ish
+		console.debug('content', content);
 
 		let v = await handlers.handleNoMergeGet(db, digest, keystore);
-		if (v === undefined) {
+		if (v === false) {
 			db.close();
 			assert.fail('');
 		}
 
+		db.close();
+		return;
 		v = await handlers.handleClientMergeGet(db, digest, keystore);
-		if (v === undefined) {
+		if (v === false) {
 			db.close();
 			assert.fail('');
 		}
@@ -187,7 +194,7 @@ describe('server', async () => {
 		j = await handlers.handleNoMergeGet(db, digest, keystore);
 		assert(v); // true-ish
 
-		let o = JSON.parse(j);
+		let o = JSON.parse(j[0]);
 		o.bar = 'xyzzy';
 		j = JSON.stringify(o);
@@ -212,82 +219,39 @@ describe('server', async () => {
 		j = await handlers.handleNoMergeGet(db, digest, keystore);
 		assert(j); // true-ish
 
-		o = JSON.parse(j);
+		o = JSON.parse(j[0]);
 		console.log(o);
 
 		db.close();
 	});
 
-	await it('server_merge', async () => {
-		const keystore = await createKeystore();
-		const signer = new PGPSigner(keystore);
-
-		const db = await createDatabase(__dirname + '/db.three.sqlite');
-
-		const digest = 'deadbeef';
-		let s = new Syncable(digest, {
-			bar: 'baz',
-		});
-		let env = await wrap(s, signer)
-		let j:any = env.toJSON();
-
-		let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
-		assert(v); // true-ish
-
-		j = await handlers.handleNoMergeGet(db, digest, keystore);
-		assert(v); // true-ish
-
-		let o = JSON.parse(j);
-		o.bar = 'xyzzy';
-		j = JSON.stringify(o);
-
-		let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
-		assert(signMaterial)
-
-		env = Envelope.fromJSON(signMaterial);
-
-		console.log('envvvv', env);
-
-		const signedData = await signData(env.o['digest'], keystore);
-		console.log('signed', signedData);
-		o = {
-			'm': env,
-			's': signedData,
-		}
-		j = JSON.stringify(o);
-		console.log(j);
-
-		v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
-		assert(v);
-
-		j = await handlers.handleNoMergeGet(db, digest, keystore);
-		assert(j); // true-ish
-
-		o = JSON.parse(j);
-		console.log(o);
-
-		db.close();
-	});
-
-	// await it('server_merge_empty', async () => {
+	// await it('server_merge', async () => {
 	//	const keystore = await createKeystore();
 	//	const signer = new PGPSigner(keystore);
 	//
 	//	const db = await createDatabase(__dirname + '/db.three.sqlite');
 	//
-	//	const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
-	//	let o:any = {
-	//		foo: 'bar',
-	//		xyzzy: 42,
-	//	}
-	//	let j:any = JSON.stringify(o);
+	//	const digest = 'deadbeef';
+	//	let s = new Syncable(digest, {
+	//		bar: 'baz',
+	//	});
+	//	let env = await wrap(s, signer)
+	//	let j:any = env.toJSON();
+	//
+	//	let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
+	//	assert(v); // true-ish
+	//
+	//	j = await handlers.handleNoMergeGet(db, digest, keystore);
+	//	assert(v); // true-ish
+	//
+	//	let o = JSON.parse(j);
+	//	o.bar = 'xyzzy';
+	//	j = JSON.stringify(o);
 	//
 	//	let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
 	//	assert(signMaterial)
 	//
-	//	const env = Envelope.fromJSON(signMaterial);
+	//	env = Envelope.fromJSON(signMaterial);
 	//
 	//	console.log('envvvv', env);
 	//
@@ -301,7 +265,7 @@ describe('server', async () => {
 	//	j = JSON.stringify(o);
 	//	console.log(j);
 	//
-	//	let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
+	//	v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
 	//	assert(v);
 	//
 	//	j = await handlers.handleNoMergeGet(db, digest, keystore);
@@ -311,5 +275,88 @@ describe('server', async () => {
 	//
 	//	db.close();
 	// });
+	//
+
+	await it('server_merge_empty', async () => {
+		const keystore = await createKeystore();
+		const signer = new PGPSigner(keystore);
+
+		const db = await createDatabase(__dirname + '/db.three.sqlite');
+
+		const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
+		let o:any = {
+			foo: 'bar',
+			xyzzy: 42,
+		}
+		let j:any = JSON.stringify(o);
+
+		let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
+		assert(signMaterial)
+
+		const env = Envelope.fromJSON(signMaterial);
+
+		console.log('envvvv', env);
+
+		const signedData = await signData(env.o['digest'], keystore);
+		console.log('signed', signedData);
+		o = {
+			'm': env,
+			's': signedData,
+		}
+		j = JSON.stringify(o);
+		console.log(j);
+
+		let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
+		assert(v);
+
+		j = await handlers.handleNoMergeGet(db, digest, keystore);
+		assert(j); // true-ish
+
+		o = JSON.parse(j[0]);
+		console.log(o);
+
+		db.close();
+	});
+
+	await it('immutable_nodigest', async() => {
+		const keystore = await createKeystore();
+		const db = await createDatabase(__dirname + '/db.three.sqlite');
+
+		const s:string = 'foo';
+		let r;
+		r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
+		assert(r[0]);
+		assert(hashOfFoo == r[1]);
+
+		r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
+		assert(!r[0]);
+		assert(hashOfFoo == r[1]);
+
+		const b:Uint8Array = new TextEncoder().encode(s);
+		r = await handlers.handleImmutablePost(b, db, undefined, keystore, 'text/plain');
+		assert(!r[0]);
+		assert(hashOfFoo == r[1]);
+	});
+
+	await it('immutable_digest', async() => {
+		const keystore = await createKeystore();
+		const db = await createDatabase(__dirname + '/db.three.sqlite');
+
+		const s:string = 'foo';
+		const b:Uint8Array = new TextEncoder().encode(s);
+		let r;
+		r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
+		assert(r[0]);
+		assert(hashOfFoo == r[1]);
+
+		r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
+		assert(!r[0]);
+		assert(hashOfFoo == r[1]);
+
+		r = await handlers.handleImmutablePost(s, db, hashOfFoo, keystore, 'text/plain');
+		assert(!r[0]);
+		assert(hashOfFoo == r[1]);
+	});
 });
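The hashOfFoo constant used by the new tests is simply the SHA-256 digest of the string 'foo', which can be confirmed with Node's crypto module (the same primitive handleImmutablePost uses for content addressing):

```typescript
import * as crypto from 'crypto';

// sha256("foo") — matches the hashOfFoo constant in the tests above.
const digest = crypto.createHash('sha256').update('foo').digest('hex');
console.assert(digest === '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae');
```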

View File

@@ -450,6 +450,7 @@ services:
       dockerfile: docker/Dockerfile
       args:
         DOCKER_REGISTRY: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}
+        NPM_REPOSITORY: ${DEV_NPM_REPOSITORY:-https://registry.npmjs.org}
     environment:
       DATABASE_HOST: ${DATABASE_HOST:-postgres}
       DATABASE_PORT: ${DATABASE_PORT:-5432}