2021-02-21 16:41:37 +01:00
|
|
|
const fs = require('fs');
|
|
|
|
const path = require('path');
|
|
|
|
const http = require('http');
|
|
|
|
|
2021-05-21 11:42:08 +02:00
|
|
|
const cic = require('@cicnet/cic-client-meta');
|
|
|
|
const crdt = require('@cicnet/crdt-meta');
|
2021-02-21 16:41:37 +01:00
|
|
|
|
|
|
|
//const conf = JSON.parse(fs.readFileSync('./cic.conf'));

// Load and resolve configuration from the ./config directory.
const config = new crdt.Config('./config');
config.process();
// Dump the effective configuration at startup for debugging.
console.log(config);
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * PUT a signed envelope to the meta server under the given uid.
 *
 * @param {string} uid - content key the envelope is stored under; appended to META_URL.
 * @param {Object} envelope - crdt envelope; serialized with toJSON() before sending.
 */
function sendit(uid, envelope) {
	const d = envelope.toJSON();

	// Content-Length must be the byte length, not the JS string length,
	// in case the serialized payload contains multi-byte characters.
	const contentLength = (new TextEncoder().encode(d)).length;
	const opts = {
		method: 'PUT',
		headers: {
			'Content-Type': 'application/json',
			'Content-Length': contentLength,
			'X-CIC-AUTOMERGE': 'client',
		},
	};

	let url = config.get('META_URL');
	// Normalize to exactly one trailing slash before appending the uid.
	url = url.replace(new RegExp('^(.+://[^/]+)/*$'), '$1/');
	console.log('posting to url: ' + url + uid);

	const req = http.request(url + uid, opts, (res) => {
		// Forward response chunks through an arrow so `this` stays bound to
		// process.stdout; passing process.stdout.write directly detaches the
		// method and throws when the stream invokes it.
		res.on('data', (chunk) => process.stdout.write(chunk));
		res.on('end', () => {
			if (!res.complete) {
				console.log('The connection was terminated while the message was being sent.');
			}
			console.log('result', res.statusCode, res.headers);
		});
	});
	req.on('error', (err) => {
		console.log('ERROR when talking to meta', err);
	});
	req.write(d);
	req.end();
}
|
|
|
|
|
|
|
|
/**
 * Read one exported meta JSON file, wrap it in a signed Syncable and hand
 * the wrapped envelope to sendit() for upload.
 *
 * @param {Object} keystore - PGP keystore used to construct the signer.
 * @param {string} filePath - path to an "<address>.json" export file.
 */
function doOne(keystore, filePath) {
	const pgpSigner = new crdt.PGPSigner(keystore);

	// The file name encodes the ethereum address: "<hex>.json".
	const addressHex = path.basename(filePath).split('.')[0];

	cic.User.toKey('0x' + addressHex).then((uid) => {
		const raw = fs.readFileSync(filePath, 'utf-8');
		const payload = JSON.parse(raw);

		// NOTE(review): the source file is removed before the upload has
		// completed, so a failed send loses the record — confirm intentional.
		fs.unlinkSync(filePath);

		const syncable = new crdt.Syncable(uid, payload);
		syncable.setSigner(pgpSigner);
		syncable.onwrap = (envelope) => {
			console.log(`Sending uid: ${uid} and env: ${envelope} to meta`);
			sendit(uid, envelope);
		};
		syncable.sign();
	});
}
|
|
|
|
|
|
|
|
const privateKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_PRIVATE_KEY_FILE'));
// FIXME(review): publicKeyPath is built from PGP_PRIVATE_KEY_FILE, so both
// paths point at the same file. If the config defines a separate public key
// export (e.g. PGP_PUBLIC_KEY_FILE) this should read that instead — confirm
// against the config schema before changing.
const publicKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_PRIVATE_KEY_FILE'));

// Declared with const: these were previously implicit globals, which throw
// under strict mode / ES modules.
const pk = fs.readFileSync(privateKeyPath);
const pubk = fs.readFileSync(publicKeyPath);

// The keystore invokes importMeta (hoisted function declaration below) once
// the keys have been loaded and unlocked.
new crdt.PGPKeyStore(
	config.get('PGP_PASSPHRASE'),
	pk,
	pubk,
	undefined,
	undefined,
	importMeta,
);
|
|
|
|
|
2021-03-07 19:01:44 +01:00
|
|
|
const batchSize = 16;
|
2021-02-21 16:41:37 +01:00
|
|
|
const batchDelay = 1000;
|
|
|
|
const total = parseInt(process.argv[3]);
|
|
|
|
const workDir = path.join(process.argv[2], 'meta');
|
|
|
|
let count = 0;
|
|
|
|
let batchCount = 0;
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Drain workDir in batches: submit up to batchSize JSON files per pass,
 * pausing batchDelay ms between batches, and reschedule itself until
 * `total` files have been processed. Non-.json files are skipped.
 *
 * @param {Object} keystore - PGP keystore passed through to doOne().
 */
function importMeta(keystore) {
	console.log('Running importMeta....');

	let files;
	try {
		// The original `err, files = readdirSync(...)` comma expression only
		// ever assigned the listing to files; readdirSync signals failure by
		// throwing, handled below.
		files = fs.readdirSync(workDir);
	} catch {
		console.error('source directory not yet ready', workDir);
		setTimeout(importMeta, batchDelay, keystore);
		return;
	}

	console.log(`Trying to read ${files.length} files`);
	// The original compared the array itself to 0 (`files === 0`), which can
	// never be true. Bail only when nothing has been processed yet; mid-run
	// an empty directory just means the producer is lagging, so keep polling.
	if (files.length === 0 && count === 0) {
		console.log(`ERROR did not find any files under ${workDir}. \nLooks like there is no work for me, bailing!`);
		process.exit(1);
	}

	let limit = batchSize;
	if (files.length < limit) {
		limit = files.length;
	}
	for (let i = 0; i < limit; i++) {
		const file = files[i];
		if (!file.endsWith('.json')) {
			console.debug('skipping file', file);
			continue;
		}
		const filePath = path.join(workDir, file);
		doOne(keystore, filePath);
		count++;
		batchCount++;
		if (batchCount === batchSize) {
			console.debug('reached batch size, breathing');
			batchCount = 0;
			setTimeout(importMeta, batchDelay, keystore);
			return;
		}
	}

	if (count === total) {
		return;
	}
	// Not done yet: poll again shortly for remaining files.
	setTimeout(importMeta, 100, keystore);
}
|