feat: Add immutable pointers to meta

Louis Holbrook 2021-10-25 18:51:08 +00:00
parent d8f51c5bdd
commit fe0835a4e7
8 changed files with 5930 additions and 131 deletions
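The change introduces a second storage mode alongside the signed Automerge (CRDT) records: content posted as "immutable" is stored under the SHA-256 digest of its bytes, so the digest itself becomes the pointer and identical content always resolves to the same entry. A minimal sketch of that addressing scheme (illustration only, not code from this commit):

import { createHash } from 'crypto';

// Immutable entries are content-addressed: the key is the SHA-256 hex
// digest of the raw bytes, so the same content always yields the same pointer.
function contentAddress(data: string | Uint8Array): string {
  const bytes = typeof data === 'string' ? new TextEncoder().encode(data) : data;
  return createHash('sha256').update(bytes).digest('hex');
}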

View File

@@ -5,15 +5,19 @@ WORKDIR /root
RUN apk add --no-cache postgresql bash
ARG NPM_REPOSITORY=${NPM_REPOSITORY:-https://registry.npmjs.org}
RUN npm config set snyk=false
#RUN npm config set registry={NPM_REPOSITORY}
RUN npm config set registry=${NPM_REPOSITORY}
# copy the dependencies
COPY package.json package-lock.json ./
#RUN --mount=type=cache,mode=0755,target=/root/.npm \
RUN npm set cache /root/.npm && \
npm cache verify && \
RUN --mount=type=cache,mode=0755,target=/root/.npm \
npm set cache /root/.npm && \
npm ci --verbose
COPY webpack.config.js .
COPY tsconfig.json .
COPY webpack.config.js ./
COPY tsconfig.json ./
## required to build the cic-client-meta module
COPY . .
COPY tests/*.asc /root/pgp/

File diff suppressed because it is too large

View File

@@ -1,8 +1,9 @@
create table if not exists store (
id serial primary key not null,
owner_fingerprint text not null,
owner_fingerprint text default null,
hash char(64) not null unique,
content text not null
content text not null,
mime_type text
);
create index if not exists idx_fp on store ((lower(owner_fingerprint)));
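With this migration owner_fingerprint becomes nullable and a mime_type column is added: rows without a fingerprint are the new immutable, content-addressed entries, while signed CRDT records keep their owner fingerprint and continue to be served as application/json. A sketch of an immutable insert under the new schema (parameterized node-postgres calls are used here only for illustration; the handlers in this commit build the SQL strings directly):

import { Client } from 'pg';

// Illustration only: store an encrypted immutable entry under the new schema.
// owner_fingerprint is left NULL, which is what marks the row as immutable;
// mime_type records how the content should be served on retrieval.
async function storeImmutable(db: Client, digest: string, cipherText: string, mimeType: string): Promise<void> {
  await db.query(
    'INSERT INTO store (hash, content, mime_type) VALUES ($1, $2, $3) ' +
    'ON CONFLICT (hash) DO UPDATE SET content = EXCLUDED.content',
    [digest, cipherText, mimeType],
  );
}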

View File

@@ -1,9 +1,10 @@
create table if not exists store (
/*id serial primary key not null,*/
id integer primary key autoincrement,
owner_fingerprint text not null,
owner_fingerprint text default null,
hash char(64) not null unique,
content text not null
content text not null,
mime_type text
);
create index if not exists idx_fp on store ((lower(owner_fingerprint)));

View File

@@ -1,12 +1,13 @@
import * as Automerge from 'automerge';
import * as pgp from 'openpgp';
import * as crypto from 'crypto';
import { Envelope, Syncable } from '@cicnet/crdt-meta';
import { Envelope, Syncable, bytesToHex } from '@cicnet/crdt-meta';
function handleNoMergeGet(db, digest, keystore) {
const sql = "SELECT content FROM store WHERE hash = '" + digest + "'";
return new Promise<string|boolean>((whohoo, doh) => {
const sql = "SELECT owner_fingerprint, content, mime_type FROM store WHERE hash = '" + digest + "'";
return new Promise<any>((whohoo, doh) => {
db.query(sql, (e, rs) => {
if (e !== null && e !== undefined) {
doh(e);
@@ -16,16 +17,36 @@ function handleNoMergeGet(db, digest, keystore) {
return;
}
const immutable = rs.rows[0]['owner_fingerprint'] == undefined;
let mimeType;
if (immutable) {
if (rs.rows[0]['mime_type'] === undefined) {
mimeType = 'application/octet-stream';
} else {
mimeType = rs.rows[0]['mime_type'];
}
} else {
mimeType = 'application/json';
}
const cipherText = rs.rows[0]['content'];
pgp.message.readArmored(cipherText).then((m) => {
const opts = {
message: m,
privateKeys: [keystore.getPrivateKey()],
format: 'binary',
};
pgp.decrypt(opts).then((plainText) => {
const o = Syncable.fromJSON(plainText.data);
const r = JSON.stringify(o.m['data']);
whohoo(r);
let r;
if (immutable) {
r = plainText.data;
} else {
mimeType = 'application/json';
const d = new TextDecoder().decode(plainText.data);
const o = Syncable.fromJSON(d);
r = JSON.stringify(o.m['data']);
}
whohoo([r, mimeType]);
}).catch((e) => {
console.error('decrypt', e);
doh(e);
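handleNoMergeGet used to resolve a plain JSON string (or false when the digest is unknown); it now resolves a [content, mimeType] pair, so immutable entries can be served with their stored content type (falling back to application/octet-stream) while CRDT records remain application/json. A sketch of a caller consuming the new shape, mirroring what the server script below does (the import path follows the tests, the any types are placeholders):

import { ServerResponse } from 'http';
import * as handlers from '../scripts/server/handlers';

// Sketch: serve a stored entry using the new return shape of handleNoMergeGet.
// `false` still means the digest is not in the store.
async function serveDigest(db: any, digest: string, keystore: any, res: ServerResponse): Promise<void> {
  const r = await handlers.handleNoMergeGet(db, digest, keystore);
  if (r === false) {
    res.writeHead(404, {'Content-Type': 'text/plain'});
    res.end();
    return;
  }
  const [content, mimeType] = r;  // immutable entries carry their stored mime type
  res.writeHead(200, {'Content-Type': mimeType, 'Content-Length': content.length});
  res.end(content);
}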
@@ -57,6 +78,7 @@ function handleServerMergePost(data, db, digest, keystore, signer) {
} else {
e = Envelope.fromJSON(v);
s = e.unwrap();
console.debug('s', s, o)
s.replace(o, 'server merge');
e.set(s);
s.onwrap = (e) => {
@@ -139,7 +161,13 @@ function handleClientMergeGet(db, digest, keystore) {
privateKeys: [keystore.getPrivateKey()],
};
pgp.decrypt(opts).then((plainText) => {
const o = Syncable.fromJSON(plainText.data);
let d;
if (typeof(plainText.data) == 'string') {
d = plainText.data;
} else {
d = new TextDecoder().decode(plainText.data);
}
const o = Syncable.fromJSON(d);
const e = new Envelope(o);
whohoo(e.toJSON());
}).catch((e) => {
@@ -201,10 +229,65 @@ function handleClientMergePut(data, db, digest, keystore, signer) {
});
}
function handleImmutablePost(data, db, digest, keystore, contentType) {
return new Promise<Array<string|boolean>>((whohoo, doh) => {
let data_binary = data;
const h = crypto.createHash('sha256');
h.update(data_binary);
const z = h.digest();
const r = bytesToHex(z);
if (digest) {
if (r != digest) {
doh('hash mismatch: ' + r + ' != ' + digest);
return;
}
} else {
digest = r;
console.debug('calculated digest ' + digest);
}
handleNoMergeGet(db, digest, keystore).then((haveDigest) => {
if (haveDigest !== false) {
whohoo([false, digest]);
return;
}
let message;
if (typeof(data) == 'string') {
data_binary = new TextEncoder().encode(data);
message = pgp.message.fromText(data);
} else {
message = pgp.message.fromBinary(data);
}
const opts = {
message: message,
publicKeys: keystore.getEncryptKeys(),
};
pgp.encrypt(opts).then((cipherText) => {
const sql = "INSERT INTO store (hash, content, mime_type) VALUES ('" + digest + "', '" + cipherText.data + "', '" + contentType + "') ON CONFLICT (hash) DO UPDATE SET content = EXCLUDED.content;";
db.query(sql, (e, rs) => {
if (e !== null && e !== undefined) {
doh(e);
return;
}
whohoo([true, digest]);
});
}).catch((e) => {
doh(e);
});
}).catch((e) => {
doh(e);
});
});
}
export {
handleClientMergePut,
handleClientMergeGet,
handleServerMergePost,
handleServerMergePut,
handleNoMergeGet,
handleImmutablePost,
};
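The new handleImmutablePost resolves a [created, digest] pair: created is true when a new row was written and false when an entry for that digest already exists, and digest is the hex SHA-256 of the posted bytes, checked against a caller-supplied digest when one is given. A usage sketch following the tests further down (db and keystore set up as there):

import * as handlers from '../scripts/server/handlers';

// Usage sketch: post without a digest and let the handler derive it,
// then post the same content again to hit the duplicate path.
async function example(db: any, keystore: any): Promise<void> {
  let r = await handlers.handleImmutablePost('foo', db, undefined, keystore, 'text/plain');
  // r[0] === true: a new row was written; r[1] is sha256('foo') as a hex digest
  r = await handlers.handleImmutablePost('foo', db, undefined, keystore, 'text/plain');
  // r[0] === false: the content is already stored under the same digest
}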

View File

@@ -118,37 +118,71 @@ async function processRequest(req, res) {
return;
}
let mod = req.method.toLowerCase() + ":automerge:";
let modDetail = undefined;
let immutablePost = false;
try {
digest = parseDigest(req.url);
} catch(e) {
console.error('digest error: ' + e)
res.writeHead(400, {"Content-Type": "text/plain"});
res.end();
return;
if (req.url == '/') {
immutablePost = true;
modDetail = 'immutable';
} else {
console.error('url is not empty (' + req.url + ') and not valid digest error: ' + e)
res.writeHead(400, {"Content-Type": "text/plain"});
res.end();
return;
}
}
const mergeHeader = req.headers['x-cic-automerge'];
let mod = req.method.toLowerCase() + ":automerge:";
switch (mergeHeader) {
case "client":
mod += "client"; // client handles merges
break;
case "server":
mod += "server"; // server handles merges
break;
default:
mod += "none"; // merged object only (get only)
if (modDetail === undefined) {
const mergeHeader = req.headers['x-cic-automerge'];
switch (mergeHeader) {
case "client":
if (immutablePost) {
res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
res.end();
return;
}
modDetail = "client"; // client handles merges
break;
case "server":
if (immutablePost) {
res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
res.end();
return;
}
modDetail = "server"; // server handles merges
break;
case "immutable":
modDetail = "immutable"; // no merging, literal immutable content with content-addressing
break;
default:
modDetail = "none"; // merged object only (get only)
}
}
mod += modDetail;
let data = '';
// handle bigger chunks of data
let data;
req.on('data', (d) => {
data += d;
if (data === undefined) {
data = d;
} else {
data += d;
}
});
req.on('end', async () => {
console.debug('mode', mod);
let content = '';
req.on('end', async (d) => {
let inputContentType = req.headers['content-type'];
let debugString = 'executing mode ' + mod ;
if (data !== undefined) {
debugString += ' for content type ' + inputContentType + ' length ' + data.length;
}
console.debug(debugString);
let content;
let contentType = 'application/json';
console.debug('handling data', data);
let statusCode = 200;
let r:any = undefined;
try {
switch (mod) {
@@ -176,6 +210,7 @@ async function processRequest(req, res) {
res.end();
return;
}
content = '';
break;
//case 'get:automerge:server':
// content = await handlers.handleServerMergeGet(db, digest, keystore);
@@ -183,12 +218,24 @@ async function processRequest(req, res) {
case 'get:automerge:none':
r = await handlers.handleNoMergeGet(db, digest, keystore);
if (r == false) {
if (r === false) {
res.writeHead(404, {"Content-Type": "text/plain"});
res.end();
return;
}
content = r;
content = r[0];
contentType = r[1];
break;
case 'post:automerge:immutable':
if (inputContentType === undefined) {
inputContentType = 'application/octet-stream';
}
r = await handlers.handleImmutablePost(data, db, digest, keystore, inputContentType);
if (r[0]) {
statusCode = 201;
}
content = r[1];
break;
default:
@@ -210,8 +257,15 @@ async function processRequest(req, res) {
return;
}
const responseContentLength = (new TextEncoder().encode(content)).length;
res.writeHead(200, {
//let responseContentLength;
//if (typeof(content) == 'string') {
// (new TextEncoder().encode(content)).length;
//}
const responseContentLength = content.length;
//if (responseContentLength === undefined) {
// responseContentLength = 0;
//}
res.writeHead(statusCode, {
"Access-Control-Allow-Origin": "*",
"Content-Type": contentType,
"Content-Length": responseContentLength,

View File

@@ -7,6 +7,8 @@ import * as handlers from '../scripts/server/handlers';
import { Envelope, Syncable, ArgPair, PGPKeyStore, PGPSigner, KeyStore, Signer } from '@cicnet/crdt-meta';
import { SqliteAdapter } from '../src/db';
const hashOfFoo = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';
function createKeystore() {
const pksa = fs.readFileSync(__dirname + '/privatekeys.asc', 'utf-8');
const pubksa = fs.readFileSync(__dirname + '/publickeys.asc', 'utf-8');
@@ -44,11 +46,13 @@ function createDatabase(sqlite_file:string):Promise<any> {
// doh(e);
// return;
// }
// get this from real sql files sources
const sql = `CREATE TABLE store (
id integer primary key autoincrement,
owner_fingerprint text not null,
owner_fingerprint text default null,
hash char(64) not null unique,
content text not null
content text not null,
mime_type text default null
);
`
@@ -111,15 +115,18 @@ describe('server', async () => {
let j = env.toJSON();
const content = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
assert(content); // true-ish
console.debug('content', content);
let v = await handlers.handleNoMergeGet(db, digest, keystore);
if (v === undefined) {
if (v === false) {
db.close();
assert.fail('');
}
db.close();
return;
v = await handlers.handleClientMergeGet(db, digest, keystore);
if (v === undefined) {
if (v === false) {
db.close();
assert.fail('');
}
@@ -187,7 +194,7 @@ describe('server', async () => {
j = await handlers.handleNoMergeGet(db, digest, keystore);
assert(v); // true-ish
let o = JSON.parse(j);
let o = JSON.parse(j[0]);
o.bar = 'xyzzy';
j = JSON.stringify(o);
@@ -212,82 +219,39 @@ describe('server', async () => {
j = await handlers.handleNoMergeGet(db, digest, keystore);
assert(j); // true-ish
o = JSON.parse(j);
o = JSON.parse(j[0]);
console.log(o);
db.close();
});
await it('server_merge', async () => {
const keystore = await createKeystore();
const signer = new PGPSigner(keystore);
const db = await createDatabase(__dirname + '/db.three.sqlite');
const digest = 'deadbeef';
let s = new Syncable(digest, {
bar: 'baz',
});
let env = await wrap(s, signer)
let j:any = env.toJSON();
let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
assert(v); // true-ish
j = await handlers.handleNoMergeGet(db, digest, keystore);
assert(v); // true-ish
let o = JSON.parse(j);
o.bar = 'xyzzy';
j = JSON.stringify(o);
let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
assert(signMaterial)
env = Envelope.fromJSON(signMaterial);
console.log('envvvv', env);
const signedData = await signData(env.o['digest'], keystore);
console.log('signed', signedData);
o = {
'm': env,
's': signedData,
}
j = JSON.stringify(o);
console.log(j);
v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
assert(v);
j = await handlers.handleNoMergeGet(db, digest, keystore);
assert(j); // true-ish
o = JSON.parse(j);
console.log(o);
db.close();
});
// await it('server_merge_empty', async () => {
// await it('server_merge', async () => {
// const keystore = await createKeystore();
// const signer = new PGPSigner(keystore);
//
// const db = await createDatabase(__dirname + '/db.three.sqlite');
//
// const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
// let o:any = {
// foo: 'bar',
// xyzzy: 42,
// }
// let j:any = JSON.stringify(o);
// const digest = 'deadbeef';
// let s = new Syncable(digest, {
// bar: 'baz',
// });
// let env = await wrap(s, signer)
// let j:any = env.toJSON();
//
// let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
// assert(v); // true-ish
//
// j = await handlers.handleNoMergeGet(db, digest, keystore);
// assert(v); // true-ish
//
// let o = JSON.parse(j);
// o.bar = 'xyzzy';
// j = JSON.stringify(o);
//
// let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
// assert(signMaterial)
//
// const env = Envelope.fromJSON(signMaterial);
// env = Envelope.fromJSON(signMaterial);
//
// console.log('envvvv', env);
//
@@ -301,7 +265,7 @@ describe('server', async () => {
// j = JSON.stringify(o);
// console.log(j);
//
// let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
// v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
// assert(v);
//
// j = await handlers.handleNoMergeGet(db, digest, keystore);
@@ -311,5 +275,88 @@ describe('server', async () => {
//
// db.close();
// });
//
await it('server_merge_empty', async () => {
const keystore = await createKeystore();
const signer = new PGPSigner(keystore);
const db = await createDatabase(__dirname + '/db.three.sqlite');
const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
let o:any = {
foo: 'bar',
xyzzy: 42,
}
let j:any = JSON.stringify(o);
let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
assert(signMaterial)
const env = Envelope.fromJSON(signMaterial);
console.log('envvvv', env);
const signedData = await signData(env.o['digest'], keystore);
console.log('signed', signedData);
o = {
'm': env,
's': signedData,
}
j = JSON.stringify(o);
console.log(j);
let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
assert(v);
j = await handlers.handleNoMergeGet(db, digest, keystore);
assert(j); // true-ish
o = JSON.parse(j[0]);
console.log(o);
db.close();
});
await it('immutable_nodigest', async() => {
const keystore = await createKeystore();
const db = await createDatabase(__dirname + '/db.three.sqlite');
const s:string = 'foo';
let r;
r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
assert(r[0]);
assert(hashOfFoo == r[1]);
r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
assert(!r[0]);
assert(hashOfFoo == r[1]);
const b:Uint8Array = new TextEncoder().encode(s);
r = await handlers.handleImmutablePost(b, db, undefined, keystore, 'text/plain');
assert(!r[0]);
assert(hashOfFoo == r[1]);
});
await it('immutable_digest', async() => {
const keystore = await createKeystore();
const db = await createDatabase(__dirname + '/db.three.sqlite');
const s:string = 'foo';
const b:Uint8Array = new TextEncoder().encode(s);
let r;
r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
assert(r[0]);
assert(hashOfFoo == r[1]);
r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
assert(!r[0]);
assert(hashOfFoo == r[1]);
r = await handlers.handleImmutablePost(s, db, hashOfFoo, keystore, 'text/plain');
assert(!r[0]);
assert(hashOfFoo == r[1]);
});
});
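The hashOfFoo fixture used by the new immutable tests is simply the SHA-256 digest of the string 'foo' in hex, i.e. the pointer handleImmutablePost is expected to return; it can be re-derived directly:

import { createHash } from 'crypto';
import * as assert from 'assert';

// hashOfFoo is sha256('foo') rendered as hex.
assert.strictEqual(
  createHash('sha256').update('foo').digest('hex'),
  '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae',
);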

View File

@@ -450,6 +450,7 @@ services:
dockerfile: docker/Dockerfile
args:
DOCKER_REGISTRY: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}
NPM_REPOSITORY: ${DEV_NPM_REPOSITORY:-https://registry.npmjs.org}
environment:
DATABASE_HOST: ${DATABASE_HOST:-postgres}
DATABASE_PORT: ${DATABASE_PORT:-5432}
@@ -666,4 +667,4 @@ services:
set +a
./import_ussd.sh
volumes:
- contract-config:/tmp/cic/config/:ro
- contract-config:/tmp/cic/config/:ro