Merge branch 'master' of gitlab.com:grassrootseconomics/cic-internal-integration into philip/multi-token-v1
commit d2b811c124
@@ -1,4 +1,3 @@
@node cic-eth-accounts
@section Accounts

Accounts are private keys in the signer component, keyed by "addresses": one-way transformations of public keys. Data can be signed by using the account address as the identifier in the corresponding RPC requests.
@@ -1,4 +1,4 @@
@node cic-eth system maintenance
@anchor{cic-eth-appendix-system-maintenance}
@appendix Admin API

The admin API is still at an early stage of refinement; its user friendliness can be considerably improved.

@@ -33,7 +33,7 @@ Get the current state of a lock

@appendixsection tag_account

-Associate an identifier with an account address (@xref{cic-eth system accounts})
+Associate an identifier with an account address (@xref{cic-eth-system-accounts})

@appendixsection have_account
@@ -14,5 +14,6 @@ Released 2021 under GPL3
@c
@contents

@include index.texi
@include content.texi
+@include appendix.texi
apps/cic-eth/doc/texinfo/appendix.texi (new file, 3 lines)
@@ -0,0 +1,3 @@
+@include admin.texi
+@include chains.texi
+@include transfertypes.texi
@@ -1,4 +1,4 @@
@node cic-eth Appendix Task chains
@anchor{cic-eth-appendix-task-chains}
@appendix Task chains

TBC - explain here how to generate these chain diagrams
@@ -1,4 +1,3 @@
@node cic-eth configuration
@section Configuration

Configuration parameters are grouped by configuration filename.
@@ -1,6 +1,6 @@
@node cic-eth
@top cic-eth

@include intro.texi
@include dependencies.texi
@include configuration.texi
@include system.texi
@@ -9,6 +9,3 @@
@include incoming.texi
@include services.texi
@include tools.texi
-@include admin.texi
-@include chains.texi
-@include transfertypes.texi
@@ -1,4 +1,3 @@
@node cic-eth-dependencies
@section Dependencies

This application is written in Python 3.8. It is tightly coupled with @code{python-celery}, which provides the task worker ecosystem. It also uses @code{SQLAlchemy}, which provides useful abstractions for persistent storage through SQL, and @code{alembic} for database schema migrations.
@@ -1,4 +1,4 @@
@node cic-eth-incoming
@anchor{cic-eth-incoming}
@section Incoming transactions

All transactions in mined blocks are passed by the @code{chainsyncer} component to a selection of plugin filters. Each of these filters is an individual Python module file in @code{cic_eth.runnable.daemons.filters}. This section describes their function.
@@ -1,9 +1,8 @@
@node cic-eth-interacting
@section Interacting with the system

-The API to the @var{cic-eth} component is a proxy for executing @emph{chains of Celery tasks}. The tasks that compose individual chains are documented in @ref{cic-eth Appendix Task chains,the Task Chain appendix}, which also describes a CLI tool that can generate graph representationso of them.
+The API to the @var{cic-eth} component is a proxy for executing @emph{chains of Celery tasks}. The tasks that compose individual chains are documented in @ref{cic-eth-appendix-task-chains,the Task Chain appendix}, which also describes a CLI tool that can generate graph representations of them.

-There are two API classes, @var{Api} and @var{AdminApi}. The former is described later in this section, the latter described in @ref{cic-eth system maintenance,the Admin API appendix}.
+There are two API classes, @var{Api} and @var{AdminApi}. The former is described later in this section, the latter in @ref{cic-eth-appendix-system-maintenance,the Admin API appendix}.

@subsection Interface
@@ -1,4 +1,3 @@
@node cic-eth-outgoing
@section Outgoing transactions

@strong{Important! A prerequisite for proper functioning of the component is that no other agent sends transactions to the network for any of the keys in the keystore.}
@@ -1,4 +1,3 @@
@node cic-eth-services
@section Services

There are four daemons that together orchestrate all of the aforementioned recipes. This section provides a high-level description of them.
@@ -1,10 +1,10 @@
@node cic-eth system accounts
@section System initialization

When the system starts for the first time, it is locked for any state change request other than account creation@footnote{Specifically, the @code{INIT}, @code{SEND} and @code{QUEUE} lock bits are set.}. These locks should be @emph{reset} once system initialization has been completed. Currently, system initialization only involves creating and tagging the required system accounts, as specified below.

See @ref{cic-eth-locking,Locking} and @ref{cic-eth-tools-ctrl,ctrl in Tools} for details on locking.

@anchor{cic-eth-system-accounts}
@subsection System accounts

Certain accounts in the system have special roles. These are defined by @emph{tagging} certain account addresses with well-known identifiers.
@@ -1,4 +1,3 @@
@node cic-eth-tools
@section Tools

A collection of CLI tools has been provided to help with diagnostics and other administrative tasks. These use the same configuration infrastructure as the daemons.
@@ -37,7 +36,7 @@ Execute a token transfer on behalf of a custodial account.

@subsection tag (cic-eth-tag)

-Associate an account address with a string identifier. @xref{cic-eth system accounts}
+Associate an account address with a string identifier. @xref{cic-eth-system-accounts}


@anchor{cic-eth-tools-ctrl}
@@ -1,4 +1,3 @@
@node cic-eth Appendix Transaction types
@appendix Transfer types

@table @var
@@ -5,15 +5,19 @@ WORKDIR /root

RUN apk add --no-cache postgresql bash

+ARG NPM_REPOSITORY=${NPM_REPOSITORY:-https://registry.npmjs.org}
RUN npm config set snyk=false
-#RUN npm config set registry={NPM_REPOSITORY}
+RUN npm config set registry=${NPM_REPOSITORY}

# copy the dependencies
COPY package.json package-lock.json ./
-#RUN --mount=type=cache,mode=0755,target=/root/.npm \
-RUN npm set cache /root/.npm && \
-	npm cache verify && \
+RUN --mount=type=cache,mode=0755,target=/root/.npm \
+	npm set cache /root/.npm && \
	npm ci --verbose

-COPY webpack.config.js .
-COPY tsconfig.json .
+COPY webpack.config.js ./
+COPY tsconfig.json ./
## required to build the cic-client-meta module
COPY . .
COPY tests/*.asc /root/pgp/
apps/cic-meta/package-lock.json (generated, 5650 lines; diff suppressed because it is too large)
@@ -1,8 +1,9 @@
create table if not exists store (
	id serial primary key not null,
-	owner_fingerprint text not null,
+	owner_fingerprint text default null,
	hash char(64) not null unique,
-	content text not null
+	content text not null,
+	mime_type text
);

create index if not exists idx_fp on store ((lower(owner_fingerprint)));
@@ -1,9 +1,10 @@
create table if not exists store (
	/*id serial primary key not null,*/
	id integer primary key autoincrement,
-	owner_fingerprint text not null,
+	owner_fingerprint text default null,
	hash char(64) not null unique,
-	content text not null
+	content text not null,
+	mime_type text
);

create index if not exists idx_fp on store ((lower(owner_fingerprint)));
@@ -1,12 +1,13 @@
import * as Automerge from 'automerge';
import * as pgp from 'openpgp';
+import * as crypto from 'crypto';

-import { Envelope, Syncable } from '@cicnet/crdt-meta';
+import { Envelope, Syncable, bytesToHex } from '@cicnet/crdt-meta';


function handleNoMergeGet(db, digest, keystore) {
-	const sql = "SELECT content FROM store WHERE hash = '" + digest + "'";
-	return new Promise<string|boolean>((whohoo, doh) => {
+	const sql = "SELECT owner_fingerprint, content, mime_type FROM store WHERE hash = '" + digest + "'";
+	return new Promise<any>((whohoo, doh) => {
		db.query(sql, (e, rs) => {
			if (e !== null && e !== undefined) {
				doh(e);
@@ -16,16 +17,36 @@ function handleNoMergeGet(db, digest, keystore) {
				return;
			}

+			const immutable = rs.rows[0]['owner_fingerprint'] == undefined;
+			let mimeType;
+			if (immutable) {
+				if (rs.rows[0]['mime_type'] === undefined) {
+					mimeType = 'application/octet-stream';
+				} else {
+					mimeType = rs.rows[0]['mime_type'];
+				}
+			} else {
+				mimeType = 'application/json';
+			}
+
			const cipherText = rs.rows[0]['content'];
			pgp.message.readArmored(cipherText).then((m) => {
				const opts = {
					message: m,
					privateKeys: [keystore.getPrivateKey()],
+					format: 'binary',
				};
				pgp.decrypt(opts).then((plainText) => {
-					const o = Syncable.fromJSON(plainText.data);
-					const r = JSON.stringify(o.m['data']);
-					whohoo(r);
+					let r;
+					if (immutable) {
+						r = plainText.data;
+					} else {
+						mimeType = 'application/json';
+						const d = new TextDecoder().decode(plainText.data);
+						const o = Syncable.fromJSON(d);
+						r = JSON.stringify(o.m['data']);
+					}
+					whohoo([r, mimeType]);
				}).catch((e) => {
					console.error('decrypt', e);
					doh(e);
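With this change handleNoMergeGet resolves a [content, mimeType] pair instead of a bare string, so callers must unpack both values. A minimal caller sketch (variable names are illustrative, not from the diff):

    // handleNoMergeGet resolves false when the digest is unknown, otherwise
    // a two-element tuple: the plaintext content and the mime type to serve.
    const r = await handlers.handleNoMergeGet(db, digest, keystore);
    if (r === false) {
      // no record stored under this hash; respond 404
    } else {
      const [content, mimeType] = r;
      // immutable records carry their stored mime_type (falling back to
      // application/octet-stream); merged records are application/json
    }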
@@ -57,6 +78,7 @@ function handleServerMergePost(data, db, digest, keystore, signer) {
	} else {
		e = Envelope.fromJSON(v);
		s = e.unwrap();
+		console.debug('s', s, o)
		s.replace(o, 'server merge');
		e.set(s);
		s.onwrap = (e) => {
@@ -139,7 +161,13 @@ function handleClientMergeGet(db, digest, keystore) {
			privateKeys: [keystore.getPrivateKey()],
		};
		pgp.decrypt(opts).then((plainText) => {
-			const o = Syncable.fromJSON(plainText.data);
+			let d;
+			if (typeof(plainText.data) == 'string') {
+				d = plainText.data;
+			} else {
+				d = new TextDecoder().decode(plainText.data);
+			}
+			const o = Syncable.fromJSON(d);
			const e = new Envelope(o);
			whohoo(e.toJSON());
		}).catch((e) => {
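The decode guard is needed because decryption can now hand back either a string or, when binary output is requested, a Uint8Array. The same normalization could be factored into a helper (a sketch, not part of the diff):

    // Normalize decrypt output to text: strings pass through unchanged,
    // binary payloads are UTF-8 decoded.
    const toText = (data: string | Uint8Array): string =>
      typeof data === 'string' ? data : new TextDecoder().decode(data);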
@@ -201,10 +229,65 @@ function handleClientMergePut(data, db, digest, keystore, signer) {
	});
}


+function handleImmutablePost(data, db, digest, keystore, contentType) {
+	return new Promise<Array<string|boolean>>((whohoo, doh) => {
+		let data_binary = data;
+		const h = crypto.createHash('sha256');
+		h.update(data_binary);
+		const z = h.digest();
+		const r = bytesToHex(z);
+
+		if (digest) {
+			if (r != digest) {
+				doh('hash mismatch: ' + r + ' != ' + digest);
+				return;
+			}
+		} else {
+			digest = r;
+			console.debug('calculated digest ' + digest);
+		}
+
+		handleNoMergeGet(db, digest, keystore).then((haveDigest) => {
+			if (haveDigest !== false) {
+				whohoo([false, digest]);
+				return;
+			}
+			let message;
+			if (typeof(data) == 'string') {
+				data_binary = new TextEncoder().encode(data);
+				message = pgp.message.fromText(data);
+			} else {
+				message = pgp.message.fromBinary(data);
+			}
+
+			const opts = {
+				message: message,
+				publicKeys: keystore.getEncryptKeys(),
+			};
+			pgp.encrypt(opts).then((cipherText) => {
+				const sql = "INSERT INTO store (hash, content, mime_type) VALUES ('" + digest + "', '" + cipherText.data + "', '" + contentType + "') ON CONFLICT (hash) DO UPDATE SET content = EXCLUDED.content;";
+				db.query(sql, (e, rs) => {
+					if (e !== null && e !== undefined) {
+						doh(e);
+						return;
+					}
+					whohoo([true, digest]);
+				});
+			}).catch((e) => {
+				doh(e);
+			});
+		}).catch((e) => {
+			doh(e);
+		});
+	});
+}
+
export {
	handleClientMergePut,
	handleClientMergeGet,
	handleServerMergePost,
	handleServerMergePut,
	handleNoMergeGet,
+	handleImmutablePost,
};
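handleImmutablePost content-addresses each record: the store key is the hex sha256 of the unencrypted payload, so posting the same bytes twice resolves [false, digest] the second time instead of inserting a duplicate. A minimal sketch of the addressing rule, using only node's crypto (the helper name is illustrative):

    import * as crypto from 'crypto';

    // The record key is the hex sha256 of the raw content bytes.
    const digestOf = (data: string | Uint8Array): string =>
      crypto.createHash('sha256').update(data).digest('hex');

    digestOf('foo');
    // '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'
    // (the hashOfFoo constant asserted in the tests below)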
@@ -118,37 +118,71 @@ async function processRequest(req, res) {
		return;
	}

-	let mod = req.method.toLowerCase() + ":automerge:";
+	let modDetail = undefined;
+	let immutablePost = false;
	try {
		digest = parseDigest(req.url);
	} catch(e) {
-		console.error('digest error: ' + e)
-		res.writeHead(400, {"Content-Type": "text/plain"});
-		res.end();
-		return;
+		if (req.url == '/') {
+			immutablePost = true;
+			modDetail = 'immutable';
+		} else {
+			console.error('url is not empty (' + req.url + ') and not a valid digest: ' + e)
+			res.writeHead(400, {"Content-Type": "text/plain"});
+			res.end();
+			return;
+		}
	}

-	const mergeHeader = req.headers['x-cic-automerge'];
-	switch (mergeHeader) {
-		case "client":
-			mod += "client"; // client handles merges
-			break;
-		case "server":
-			mod += "server"; // server handles merges
-			break;
-		default:
-			mod += "none"; // merged object only (get only)
-	}
+	let mod = req.method.toLowerCase() + ":automerge:";
+	if (modDetail === undefined) {
+		const mergeHeader = req.headers['x-cic-automerge'];
+		switch (mergeHeader) {
+			case "client":
+				if (immutablePost) {
+					res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
+					res.end();
+					return;
+				}
+				modDetail = "client"; // client handles merges
+				break;
+			case "server":
+				if (immutablePost) {
+					res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
+					res.end();
+					return;
+				}
+				modDetail = "server"; // server handles merges
+				break;
+			case "immutable":
+				modDetail = "immutable"; // no merging, literal immutable content with content-addressing
+				break;
+			default:
+				modDetail = "none"; // merged object only (get only)
+		}
+	}
+	mod += modDetail;

-	let data = '';
+	// handle bigger chunks of data
+	let data;
	req.on('data', (d) => {
-		data += d;
+		if (data === undefined) {
+			data = d;
+		} else {
+			data += d;
+		}
	});
-	req.on('end', async () => {
-		console.debug('mode', mod);
-		let content = '';
-		console.debug('handling data', data);
+	req.on('end', async (d) => {
+		let inputContentType = req.headers['content-type'];
+		let debugString = 'executing mode ' + mod;
+		if (data !== undefined) {
+			debugString += ' for content type ' + inputContentType + ' length ' + data.length;
+		}
+		console.debug(debugString);
+		let content;
+		let contentType = 'application/json';
+		let statusCode = 200;
		let r:any = undefined;
		try {
			switch (mod) {
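The net effect: a POST to the bare root path '/', or a request carrying the x-cic-automerge: immutable header with a valid digest in the URL, selects the new content-addressed mode. A minimal client sketch (host and port are assumptions, not from the diff):

    // POST raw content to '/'; the server stores it under its sha256 digest
    // and answers with the digest as the response body (201 on first store,
    // 200 if the record already existed).
    const res = await fetch('http://localhost:8000/', {
      method: 'POST',
      headers: { 'Content-Type': 'text/plain' },
      body: 'foo',
    });
    console.log(res.status, await res.text());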
@@ -176,6 +210,7 @@ async function processRequest(req, res) {
					res.end();
					return;
				}
+				content = '';
				break;
			//case 'get:automerge:server':
			//	content = await handlers.handleServerMergeGet(db, digest, keystore);
@@ -183,12 +218,24 @@ async function processRequest(req, res) {

			case 'get:automerge:none':
				r = await handlers.handleNoMergeGet(db, digest, keystore);
-				if (r == false) {
+				if (r === false) {
					res.writeHead(404, {"Content-Type": "text/plain"});
					res.end();
					return;
				}
-				content = r;
+				content = r[0];
+				contentType = r[1];
				break;

+			case 'post:automerge:immutable':
+				if (inputContentType === undefined) {
+					inputContentType = 'application/octet-stream';
+				}
+				r = await handlers.handleImmutablePost(data, db, digest, keystore, inputContentType);
+				if (r[0]) {
+					statusCode = 201;
+				}
+				content = r[1];
+				break;
+
			default:
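On the read side, a plain GET by digest (no x-cic-automerge header) now serves immutable records with their stored mime type and merged records as application/json. A companion sketch to the POST above (same assumed host):

    // Fetch stored content by its sha256 digest; the Content-Type header
    // reflects the mime_type column for immutable records.
    const digest = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';
    const res = await fetch('http://localhost:8000/' + digest);
    console.log(res.headers.get('content-type'), await res.text());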
@@ -210,8 +257,15 @@ async function processRequest(req, res) {
			return;
		}

-		const responseContentLength = (new TextEncoder().encode(content)).length;
-		res.writeHead(200, {
+		//let responseContentLength;
+		//if (typeof(content) == 'string') {
+		//	(new TextEncoder().encode(content)).length;
+		//}
+		const responseContentLength = content.length;
+		//if (responseContentLength === undefined) {
+		//	responseContentLength = 0;
+		//}
+		res.writeHead(statusCode, {
			"Access-Control-Allow-Origin": "*",
			"Content-Type": contentType,
			"Content-Length": responseContentLength,
@@ -7,6 +7,8 @@ import * as handlers from '../scripts/server/handlers';
import { Envelope, Syncable, ArgPair, PGPKeyStore, PGPSigner, KeyStore, Signer } from '@cicnet/crdt-meta';
import { SqliteAdapter } from '../src/db';

+const hashOfFoo = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';
+
function createKeystore() {
	const pksa = fs.readFileSync(__dirname + '/privatekeys.asc', 'utf-8');
	const pubksa = fs.readFileSync(__dirname + '/publickeys.asc', 'utf-8');
@@ -44,11 +46,13 @@ function createDatabase(sqlite_file:string):Promise<any> {
//			doh(e);
//			return;
//		}
		// get this from real sql files sources
		const sql = `CREATE TABLE store (
			id integer primary key autoincrement,
-			owner_fingerprint text not null,
+			owner_fingerprint text default null,
			hash char(64) not null unique,
-			content text not null
+			content text not null,
+			mime_type text default null
		);
		`
@@ -111,15 +115,18 @@ describe('server', async () => {
		let j = env.toJSON();
		const content = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
		assert(content); // true-ish
+		console.debug('content', content);

		let v = await handlers.handleNoMergeGet(db, digest, keystore);
-		if (v === undefined) {
+		if (v === false) {
			db.close();
			assert.fail('');
		}
+		db.close();
+		return;

		v = await handlers.handleClientMergeGet(db, digest, keystore);
-		if (v === undefined) {
+		if (v === false) {
			db.close();
			assert.fail('');
		}
@@ -187,7 +194,7 @@ describe('server', async () => {
		j = await handlers.handleNoMergeGet(db, digest, keystore);
		assert(v); // true-ish

-		let o = JSON.parse(j);
+		let o = JSON.parse(j[0]);
		o.bar = 'xyzzy';
		j = JSON.stringify(o);
@@ -212,82 +219,39 @@ describe('server', async () => {

		j = await handlers.handleNoMergeGet(db, digest, keystore);
		assert(j); // true-ish
-		o = JSON.parse(j);
+		o = JSON.parse(j[0]);
		console.log(o);

		db.close();
	});

-	await it('server_merge', async () => {
-		const keystore = await createKeystore();
-		const signer = new PGPSigner(keystore);
-
-		const db = await createDatabase(__dirname + '/db.three.sqlite');
-
-		const digest = 'deadbeef';
-		let s = new Syncable(digest, {
-			bar: 'baz',
-		});
-		let env = await wrap(s, signer)
-		let j:any = env.toJSON();
-
-		let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
-		assert(v); // true-ish
-
-		j = await handlers.handleNoMergeGet(db, digest, keystore);
-		assert(v); // true-ish
-
-		let o = JSON.parse(j);
-		o.bar = 'xyzzy';
-		j = JSON.stringify(o);
-
-		let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
-		assert(signMaterial)
-
-		env = Envelope.fromJSON(signMaterial);
-
-		console.log('envvvv', env);
-
-		const signedData = await signData(env.o['digest'], keystore);
-		console.log('signed', signedData);
-
-		o = {
-			'm': env,
-			's': signedData,
-		}
-		j = JSON.stringify(o);
-		console.log(j);
-
-		v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
-		assert(v);
-
-		j = await handlers.handleNoMergeGet(db, digest, keystore);
-		assert(j); // true-ish
-		o = JSON.parse(j);
-		console.log(o);
-
-		db.close();
-	});
-
-
-	// await it('server_merge_empty', async () => {
+	// await it('server_merge', async () => {
	//	const keystore = await createKeystore();
	//	const signer = new PGPSigner(keystore);
	//
	//	const db = await createDatabase(__dirname + '/db.three.sqlite');
	//
-	//	const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
-	//	let o:any = {
-	//		foo: 'bar',
-	//		xyzzy: 42,
-	//	}
-	//	let j:any = JSON.stringify(o);
+	//	const digest = 'deadbeef';
+	//	let s = new Syncable(digest, {
+	//		bar: 'baz',
+	//	});
+	//	let env = await wrap(s, signer)
+	//	let j:any = env.toJSON();
	//
	//	let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
	//	assert(v); // true-ish
	//
	//	j = await handlers.handleNoMergeGet(db, digest, keystore);
	//	assert(v); // true-ish
	//
	//	let o = JSON.parse(j);
	//	o.bar = 'xyzzy';
	//	j = JSON.stringify(o);
	//
	//	let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
	//	assert(signMaterial)
	//
-	//	const env = Envelope.fromJSON(signMaterial);
+	//	env = Envelope.fromJSON(signMaterial);
	//
	//	console.log('envvvv', env);
	//
@@ -301,7 +265,7 @@ describe('server', async () => {
	//	j = JSON.stringify(o);
	//	console.log(j);
	//
-	//	let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
+	//	v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
	//	assert(v);
	//
	//	j = await handlers.handleNoMergeGet(db, digest, keystore);
@@ -311,5 +275,88 @@ describe('server', async () => {
	//
	//	db.close();
	// });
	//

+	await it('server_merge_empty', async () => {
+		const keystore = await createKeystore();
+		const signer = new PGPSigner(keystore);
+
+		const db = await createDatabase(__dirname + '/db.three.sqlite');
+
+		const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
+		let o:any = {
+			foo: 'bar',
+			xyzzy: 42,
+		}
+		let j:any = JSON.stringify(o);
+
+		let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
+		assert(signMaterial)
+
+		const env = Envelope.fromJSON(signMaterial);
+
+		console.log('envvvv', env);
+
+		const signedData = await signData(env.o['digest'], keystore);
+		console.log('signed', signedData);
+
+		o = {
+			'm': env,
+			's': signedData,
+		}
+		j = JSON.stringify(o);
+		console.log(j);
+
+		let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
+		assert(v);
+
+		j = await handlers.handleNoMergeGet(db, digest, keystore);
+		assert(j); // true-ish
+		o = JSON.parse(j[0]);
+		console.log(o);
+
+		db.close();
+	});
+
+	await it('immutable_nodigest', async() => {
+		const keystore = await createKeystore();
+		const db = await createDatabase(__dirname + '/db.three.sqlite');
+
+		const s:string = 'foo';
+		let r;
+		r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
+		assert(r[0]);
+		assert(hashOfFoo == r[1]);
+
+		r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
+		assert(!r[0]);
+		assert(hashOfFoo == r[1]);
+
+		const b:Uint8Array = new TextEncoder().encode(s);
+		r = await handlers.handleImmutablePost(b, db, undefined, keystore, 'text/plain');
+		assert(!r[0]);
+		assert(hashOfFoo == r[1]);
+	});
+
+	await it('immutable_digest', async() => {
+		const keystore = await createKeystore();
+		const db = await createDatabase(__dirname + '/db.three.sqlite');
+
+		const s:string = 'foo';
+		const b:Uint8Array = new TextEncoder().encode(s);
+		let r;
+		r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
+		assert(r[0]);
+		assert(hashOfFoo == r[1]);
+
+		r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
+		assert(!r[0]);
+		assert(hashOfFoo == r[1]);
+
+		r = await handlers.handleImmutablePost(s, db, hashOfFoo, keystore, 'text/plain');
+		assert(!r[0]);
+		assert(hashOfFoo == r[1]);
+	});
});
@@ -450,6 +450,7 @@ services:
      dockerfile: docker/Dockerfile
      args:
        DOCKER_REGISTRY: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}
+       NPM_REPOSITORY: ${DEV_NPM_REPOSITORY:-https://registry.npmjs.org}
      environment:
        DATABASE_HOST: ${DATABASE_HOST:-postgres}
        DATABASE_PORT: ${DATABASE_PORT:-5432}