feat: Add immutable pointers to meta

Louis Holbrook 2021-10-25 18:51:08 +00:00
parent d8f51c5bdd
commit fe0835a4e7
8 changed files with 5930 additions and 131 deletions
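
For orientation: this change adds an "immutable" storage mode alongside the existing Automerge/CRDT modes. Content posted in this mode is stored under the hex sha256 digest of its bytes, encrypted to the configured keys, and served back with the MIME type it was posted with. Below is a minimal, hypothetical client sketch; the endpoint URL, port, and the use of fetch are illustrative assumptions, not part of this commit.

// Hypothetical client for the new immutable mode; BASE_URL is an assumption.
const BASE_URL = 'http://localhost:8000';

async function putImmutable(payload: string, contentType = 'text/plain'): Promise<string> {
  const res = await fetch(BASE_URL + '/', {
    method: 'POST',
    headers: {
      'x-cic-automerge': 'immutable', // selects the new content-addressed mode
      'content-type': contentType,
    },
    body: payload,
  });
  // The body is the sha256 digest of the payload; 201 means newly stored,
  // 200 means a record with that digest already existed.
  return res.text();
}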

View File

@@ -5,15 +5,19 @@ WORKDIR /root
 RUN apk add --no-cache postgresql bash
-#RUN npm config set registry={NPM_REPOSITORY}
+ARG NPM_REPOSITORY=${NPM_REPOSITORY:-https://registry.npmjs.org}
+RUN npm config set snyk=false
+RUN npm config set registry=${NPM_REPOSITORY}
 # copy the dependencies
 COPY package.json package-lock.json ./
-#RUN --mount=type=cache,mode=0755,target=/root/.npm \
-RUN npm set cache /root/.npm && \
+RUN --mount=type=cache,mode=0755,target=/root/.npm \
+  npm set cache /root/.npm && \
+  npm cache verify && \
   npm ci --verbose
-COPY webpack.config.js .
-COPY tsconfig.json .
+COPY webpack.config.js ./
+COPY tsconfig.json ./
 ## required to build the cic-client-meta module
 COPY . .
 COPY tests/*.asc /root/pgp/

File diff suppressed because it is too large

View File

@@ -1,8 +1,9 @@
 create table if not exists store (
   id serial primary key not null,
-  owner_fingerprint text not null,
+  owner_fingerprint text default null,
   hash char(64) not null unique,
-  content text not null
+  content text not null,
+  mime_type text
 );
 create index if not exists idx_fp on store ((lower(owner_fingerprint)));

View File

@@ -1,9 +1,10 @@
 create table if not exists store (
   /*id serial primary key not null,*/
   id integer primary key autoincrement,
-  owner_fingerprint text not null,
+  owner_fingerprint text default null,
   hash char(64) not null unique,
-  content text not null
+  content text not null,
+  mime_type text
 );
 create index if not exists idx_fp on store ((lower(owner_fingerprint)));
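
In both schema variants, owner_fingerprint becomes nullable and a mime_type column is added: CRDT records carry the fingerprint of their signing key, while the new immutable, content-addressed records do not, and their MIME type is recorded at write time. A small illustrative sketch of how the handlers read that distinction; the row type here is an assumption for illustration.

// Illustrative row shape for the store table after this migration.
type StoreRow = {
  owner_fingerprint: string | null; // null for immutable, content-addressed entries
  hash: string;                     // hex sha256 digest of the stored content
  content: string;                  // PGP-encrypted payload
  mime_type: string | null;         // recorded for immutable entries; CRDT entries are served as application/json
};

// Mirrors the check in handleNoMergeGet below: no fingerprint means the record is immutable.
function isImmutable(row: StoreRow): boolean {
  return row.owner_fingerprint == null;
}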

View File

@@ -1,12 +1,13 @@
 import * as Automerge from 'automerge';
 import * as pgp from 'openpgp';
-import { Envelope, Syncable } from '@cicnet/crdt-meta';
+import * as crypto from 'crypto';
+import { Envelope, Syncable, bytesToHex } from '@cicnet/crdt-meta';
 function handleNoMergeGet(db, digest, keystore) {
-  const sql = "SELECT content FROM store WHERE hash = '" + digest + "'";
-  return new Promise<string|boolean>((whohoo, doh) => {
+  const sql = "SELECT owner_fingerprint, content, mime_type FROM store WHERE hash = '" + digest + "'";
+  return new Promise<any>((whohoo, doh) => {
     db.query(sql, (e, rs) => {
       if (e !== null && e !== undefined) {
         doh(e);
@@ -16,16 +17,36 @@ function handleNoMergeGet(db, digest, keystore) {
         return;
       }
+      const immutable = rs.rows[0]['owner_fingerprint'] == undefined;
+      let mimeType;
+      if (immutable) {
+        if (rs.rows[0]['mime_type'] === undefined) {
+          mimeType = 'application/octet-stream';
+        } else {
+          mimeType = rs.rows[0]['mime_type'];
+        }
+      } else {
+        mimeType = 'application/json';
+      }
       const cipherText = rs.rows[0]['content'];
       pgp.message.readArmored(cipherText).then((m) => {
         const opts = {
           message: m,
           privateKeys: [keystore.getPrivateKey()],
+          format: 'binary',
         };
         pgp.decrypt(opts).then((plainText) => {
-          const o = Syncable.fromJSON(plainText.data);
-          const r = JSON.stringify(o.m['data']);
-          whohoo(r);
+          let r;
+          if (immutable) {
+            r = plainText.data;
+          } else {
+            mimeType = 'application/json';
+            const d = new TextDecoder().decode(plainText.data);
+            const o = Syncable.fromJSON(d);
+            r = JSON.stringify(o.m['data']);
+          }
+          whohoo([r, mimeType]);
         }).catch((e) => {
          console.error('decrypt', e);
          doh(e);
@@ -57,6 +78,7 @@ function handleServerMergePost(data, db, digest, keystore, signer) {
     } else {
       e = Envelope.fromJSON(v);
       s = e.unwrap();
+      console.debug('s', s, o)
       s.replace(o, 'server merge');
       e.set(s);
       s.onwrap = (e) => {
@@ -139,7 +161,13 @@ function handleClientMergeGet(db, digest, keystore) {
         privateKeys: [keystore.getPrivateKey()],
       };
       pgp.decrypt(opts).then((plainText) => {
-        const o = Syncable.fromJSON(plainText.data);
+        let d;
+        if (typeof(plainText.data) == 'string') {
+          d = plainText.data;
+        } else {
+          d = new TextDecoder().decode(plainText.data);
+        }
+        const o = Syncable.fromJSON(d);
         const e = new Envelope(o);
         whohoo(e.toJSON());
       }).catch((e) => {
@@ -201,10 +229,65 @@ function handleClientMergePut(data, db, digest, keystore, signer) {
     });
   });
 }
+
+function handleImmutablePost(data, db, digest, keystore, contentType) {
+  return new Promise<Array<string|boolean>>((whohoo, doh) => {
+    let data_binary = data;
+    const h = crypto.createHash('sha256');
+    h.update(data_binary);
+    const z = h.digest();
+    const r = bytesToHex(z);
+    if (digest) {
+      if (r != digest) {
+        doh('hash mismatch: ' + r + ' != ' + digest);
+        return;
+      }
+    } else {
+      digest = r;
+      console.debug('calculated digest ' + digest);
+    }
+    handleNoMergeGet(db, digest, keystore).then((haveDigest) => {
+      if (haveDigest !== false) {
+        whohoo([false, digest]);
+        return;
+      }
+      let message;
+      if (typeof(data) == 'string') {
+        data_binary = new TextEncoder().encode(data);
+        message = pgp.message.fromText(data);
+      } else {
+        message = pgp.message.fromBinary(data);
+      }
+      const opts = {
+        message: message,
+        publicKeys: keystore.getEncryptKeys(),
+      };
+      pgp.encrypt(opts).then((cipherText) => {
+        const sql = "INSERT INTO store (hash, content, mime_type) VALUES ('" + digest + "', '" + cipherText.data + "', '" + contentType + "') ON CONFLICT (hash) DO UPDATE SET content = EXCLUDED.content;";
+        db.query(sql, (e, rs) => {
+          if (e !== null && e !== undefined) {
+            doh(e);
+            return;
+          }
+          whohoo([true, digest]);
+        });
+      }).catch((e) => {
+        doh(e);
+      });
+    }).catch((e) => {
+      doh(e);
+    });
+  });
+}
+
 export {
   handleClientMergePut,
   handleClientMergeGet,
   handleServerMergePost,
   handleServerMergePut,
   handleNoMergeGet,
+  handleImmutablePost,
 };
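
The content address handleImmutablePost verifies (or derives, when the request carries no digest) is simply the hex-encoded sha256 of the posted bytes, and the handler resolves to a [created, digest] pair. A minimal sketch of that addressing rule with Node's crypto module:

import * as crypto from 'crypto';

// The storage key for immutable content: hex sha256 of the payload bytes.
function contentAddress(data: string | Uint8Array): string {
  return crypto.createHash('sha256').update(data).digest('hex');
}

// contentAddress('foo') yields
// '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae',
// the hashOfFoo constant asserted in the tests further down.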

View File

@@ -118,37 +118,71 @@ async function processRequest(req, res) {
     return;
   }
+  let mod = req.method.toLowerCase() + ":automerge:";
+  let modDetail = undefined;
+  let immutablePost = false;
   try {
     digest = parseDigest(req.url);
   } catch(e) {
-    console.error('digest error: ' + e)
-    res.writeHead(400, {"Content-Type": "text/plain"});
-    res.end();
-    return;
+    if (req.url == '/') {
+      immutablePost = true;
+      modDetail = 'immutable';
+    } else {
+      console.error('url is not empty (' + req.url + ') and not valid digest error: ' + e)
+      res.writeHead(400, {"Content-Type": "text/plain"});
+      res.end();
+      return;
+    }
   }
-  const mergeHeader = req.headers['x-cic-automerge'];
-  let mod = req.method.toLowerCase() + ":automerge:";
+  if (modDetail === undefined) {
+    const mergeHeader = req.headers['x-cic-automerge'];
     switch (mergeHeader) {
       case "client":
-        mod += "client"; // client handles merges
-        break;
-      case "server":
-        mod += "server"; // server handles merges
-        break;
-      default:
-        mod += "none"; // merged object only (get only)
+        if (immutablePost) {
+          res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
+          res.end();
+          return;
+        }
+        modDetail = "client"; // client handles merges
+        break;
+      case "server":
+        if (immutablePost) {
+          res.writeHead(400, 'Valid digest missing', {"Content-Type": "text/plain"});
+          res.end();
+          return;
+        }
+        modDetail = "server"; // server handles merges
+        break;
+      case "immutable":
+        modDetail = "immutable"; // no merging, literal immutable content with content-addressing
+        break;
+      default:
+        modDetail = "none"; // merged object only (get only)
+    }
   }
+  mod += modDetail;
-  let data = '';
+  // handle bigger chunks of data
+  let data;
   req.on('data', (d) => {
-    data += d;
+    if (data === undefined) {
+      data = d;
+    } else {
+      data += d;
+    }
   });
-  req.on('end', async () => {
-    console.debug('mode', mod);
-    let content = '';
+  req.on('end', async (d) => {
+    let inputContentType = req.headers['content-type'];
+    let debugString = 'executing mode ' + mod ;
+    if (data !== undefined) {
+      debugString += ' for content type ' + inputContentType + ' length ' + data.length;
+    }
+    console.debug(debugString);
+    let content;
     let contentType = 'application/json';
-    console.debug('handling data', data);
+    let statusCode = 200;
     let r:any = undefined;
     try {
       switch (mod) {
@@ -176,6 +210,7 @@ async function processRequest(req, res) {
             res.end();
             return;
           }
+          content = '';
           break;
         //case 'get:automerge:server':
         //  content = await handlers.handleServerMergeGet(db, digest, keystore);
@@ -183,12 +218,24 @@ async function processRequest(req, res) {
         case 'get:automerge:none':
           r = await handlers.handleNoMergeGet(db, digest, keystore);
-          if (r == false) {
+          if (r === false) {
            res.writeHead(404, {"Content-Type": "text/plain"});
            res.end();
            return;
           }
-          content = r;
+          content = r[0];
+          contentType = r[1];
+          break;
+        case 'post:automerge:immutable':
+          if (inputContentType === undefined) {
+            inputContentType = 'application/octet-stream';
+          }
+          r = await handlers.handleImmutablePost(data, db, digest, keystore, inputContentType);
+          if (r[0]) {
+            statusCode = 201;
+          }
+          content = r[1];
           break;
         default:
@@ -210,8 +257,15 @@ async function processRequest(req, res) {
       return;
     }
-    const responseContentLength = (new TextEncoder().encode(content)).length;
-    res.writeHead(200, {
+    //let responseContentLength;
+    //if (typeof(content) == 'string') {
+    //  (new TextEncoder().encode(content)).length;
+    //}
+    const responseContentLength = content.length;
+    //if (responseContentLength === undefined) {
+    //  responseContentLength = 0;
+    //}
+    res.writeHead(statusCode, {
       "Access-Control-Allow-Origin": "*",
       "Content-Type": contentType,
       "Content-Length": responseContentLength,

View File

@@ -7,6 +7,8 @@ import * as handlers from '../scripts/server/handlers';
 import { Envelope, Syncable, ArgPair, PGPKeyStore, PGPSigner, KeyStore, Signer } from '@cicnet/crdt-meta';
 import { SqliteAdapter } from '../src/db';
+const hashOfFoo = '2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae';
+
 function createKeystore() {
   const pksa = fs.readFileSync(__dirname + '/privatekeys.asc', 'utf-8');
   const pubksa = fs.readFileSync(__dirname + '/publickeys.asc', 'utf-8');
@@ -44,11 +46,13 @@ function createDatabase(sqlite_file:string):Promise<any> {
     //    doh(e);
     //    return;
     //  }
+    // get this from real sql files sources
     const sql = `CREATE TABLE store (
       id integer primary key autoincrement,
-      owner_fingerprint text not null,
+      owner_fingerprint text default null,
       hash char(64) not null unique,
-      content text not null
+      content text not null,
+      mime_type text default null
     );
     `
@@ -111,15 +115,18 @@ describe('server', async () => {
     let j = env.toJSON();
     const content = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
     assert(content); // true-ish
+    console.debug('content', content);
     let v = await handlers.handleNoMergeGet(db, digest, keystore);
-    if (v === undefined) {
+    if (v === false) {
       db.close();
       assert.fail('');
     }
+    db.close();
+    return;
     v = await handlers.handleClientMergeGet(db, digest, keystore);
-    if (v === undefined) {
+    if (v === false) {
       db.close();
       assert.fail('');
     }
@@ -187,7 +194,7 @@ describe('server', async () => {
     j = await handlers.handleNoMergeGet(db, digest, keystore);
     assert(v); // true-ish
-    let o = JSON.parse(j);
+    let o = JSON.parse(j[0]);
     o.bar = 'xyzzy';
     j = JSON.stringify(o);
@@ -212,82 +219,39 @@ describe('server', async () => {
     j = await handlers.handleNoMergeGet(db, digest, keystore);
     assert(j); // true-ish
-    o = JSON.parse(j);
+    o = JSON.parse(j[0]);
     console.log(o);
     db.close();
   });
-  await it('server_merge', async () => {
-    const keystore = await createKeystore();
-    const signer = new PGPSigner(keystore);
-
-    const db = await createDatabase(__dirname + '/db.three.sqlite');
-
-    const digest = 'deadbeef';
-    let s = new Syncable(digest, {
-      bar: 'baz',
-    });
-    let env = await wrap(s, signer)
-    let j:any = env.toJSON();
-
-    let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
-    assert(v); // true-ish
-
-    j = await handlers.handleNoMergeGet(db, digest, keystore);
-    assert(v); // true-ish
-
-    let o = JSON.parse(j);
-    o.bar = 'xyzzy';
-    j = JSON.stringify(o);
-
-    let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
-    assert(signMaterial)
-
-    env = Envelope.fromJSON(signMaterial);
-
-    console.log('envvvv', env);
-
-    const signedData = await signData(env.o['digest'], keystore);
-    console.log('signed', signedData);
-
-    o = {
-      'm': env,
-      's': signedData,
-    }
-    j = JSON.stringify(o);
-    console.log(j);
-
-    v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
-    assert(v);
-
-    j = await handlers.handleNoMergeGet(db, digest, keystore);
-    assert(j); // true-ish
-
-    o = JSON.parse(j);
-    console.log(o);
-
-    db.close();
-  });
-
-  // await it('server_merge_empty', async () => {
+  // await it('server_merge', async () => {
   //   const keystore = await createKeystore();
   //   const signer = new PGPSigner(keystore);
   //
   //   const db = await createDatabase(__dirname + '/db.three.sqlite');
   //
-  //   const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
-  //   let o:any = {
-  //     foo: 'bar',
-  //     xyzzy: 42,
-  //   }
-  //   let j:any = JSON.stringify(o);
+  //   const digest = 'deadbeef';
+  //   let s = new Syncable(digest, {
+  //     bar: 'baz',
+  //   });
+  //   let env = await wrap(s, signer)
+  //   let j:any = env.toJSON();
+  //
+  //   let v = await handlers.handleClientMergePut(j, db, digest, keystore, signer);
+  //   assert(v); // true-ish
+  //
+  //   j = await handlers.handleNoMergeGet(db, digest, keystore);
+  //   assert(v); // true-ish
+  //
+  //   let o = JSON.parse(j);
+  //   o.bar = 'xyzzy';
+  //   j = JSON.stringify(o);
   //
   //   let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
   //   assert(signMaterial)
   //
-  //   const env = Envelope.fromJSON(signMaterial);
+  //   env = Envelope.fromJSON(signMaterial);
   //
   //   console.log('envvvv', env);
   //
@@ -301,7 +265,7 @@ describe('server', async () => {
   //   j = JSON.stringify(o);
   //   console.log(j);
   //
-  //   let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
+  //   v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
   //   assert(v);
   //
   //   j = await handlers.handleNoMergeGet(db, digest, keystore);
@@ -311,5 +275,88 @@ describe('server', async () => {
   //
   //   db.close();
   // });
+  //
+
+  await it('server_merge_empty', async () => {
+    const keystore = await createKeystore();
+    const signer = new PGPSigner(keystore);
+
+    const db = await createDatabase(__dirname + '/db.three.sqlite');
+
+    const digest = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef';
+    let o:any = {
+      foo: 'bar',
+      xyzzy: 42,
+    }
+    let j:any = JSON.stringify(o);
+
+    let signMaterial = await handlers.handleServerMergePost(j, db, digest, keystore, signer);
+    assert(signMaterial)
+
+    const env = Envelope.fromJSON(signMaterial);
+
+    console.log('envvvv', env);
+
+    const signedData = await signData(env.o['digest'], keystore);
+    console.log('signed', signedData);
+
+    o = {
+      'm': env,
+      's': signedData,
+    }
+    j = JSON.stringify(o);
+    console.log(j);
+
+    let v = await handlers.handleServerMergePut(j, db, digest, keystore, signer);
+    assert(v);
+
+    j = await handlers.handleNoMergeGet(db, digest, keystore);
+    assert(j); // true-ish
+
+    o = JSON.parse(j[0]);
+    console.log(o);
+
+    db.close();
+  });
+
+  await it('immutable_nodigest', async() => {
+    const keystore = await createKeystore();
+    const db = await createDatabase(__dirname + '/db.three.sqlite');
+
+    const s:string = 'foo';
+    let r;
+    r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
+    assert(r[0]);
+    assert(hashOfFoo == r[1]);
+
+    r = await handlers.handleImmutablePost(s, db, undefined, keystore, 'text/plain');
+    assert(!r[0]);
+    assert(hashOfFoo == r[1]);
+
+    const b:Uint8Array = new TextEncoder().encode(s);
+    r = await handlers.handleImmutablePost(b, db, undefined, keystore, 'text/plain');
+    assert(!r[0]);
+    assert(hashOfFoo == r[1]);
+  });
+
+  await it('immutable_digest', async() => {
+    const keystore = await createKeystore();
+    const db = await createDatabase(__dirname + '/db.three.sqlite');
+
+    const s:string = 'foo';
+    const b:Uint8Array = new TextEncoder().encode(s);
+    let r;
+    r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
+    assert(r[0]);
+    assert(hashOfFoo == r[1]);
+
+    r = await handlers.handleImmutablePost(b, db, hashOfFoo, keystore, 'application/octet-stream');
+    assert(!r[0]);
+    assert(hashOfFoo == r[1]);
+
+    r = await handlers.handleImmutablePost(s, db, hashOfFoo, keystore, 'text/plain');
+    assert(!r[0]);
+    assert(hashOfFoo == r[1]);
+  });
 });

View File

@@ -450,6 +450,7 @@ services:
       dockerfile: docker/Dockerfile
       args:
         DOCKER_REGISTRY: ${DEV_DOCKER_REGISTRY:-registry.gitlab.com/grassrootseconomics}
+        NPM_REPOSITORY: ${DEV_NPM_REPOSITORY:-https://registry.npmjs.org}
     environment:
       DATABASE_HOST: ${DATABASE_HOST:-postgres}
       DATABASE_PORT: ${DATABASE_PORT:-5432}
@@ -666,4 +667,4 @@ services:
       set +a
       ./import_ussd.sh
     volumes:
-      - contract-config:/tmp/cic/config/:ro
\ No newline at end of file
+      - contract-config:/tmp/cic/config/:ro