// Build a merkle tree from everything in `dir` and store it in `db`,
// then call `fn(err)` once the file hashes have been written.
//
// Two write strategies are available, selected by the optional `batch`
// flag (defaults to true, preserving the original behavior):
//   batch = true  — ~4x faster: buffer every { key, value } op in
//                   memory, then write them in a single db.batch().
//   batch = false — slower but constant memory: stream each record
//                   straight into the db's write stream.
//
// Returns the Merkle instance immediately; the writes finish async.
function build(dir, db, fn, batch) {
  if (batch === undefined) batch = true;

  var merkle = Merkle(db, 'merkle');

  if (batch) {
    // Fast path: collect all ops in memory, flush in one batch.
    var ops = [];
    var wr = Writable({ objectMode: true });
    wr._write = function(file, _, done) {
      ops.push({
        key: hashFile(file.path, file.stat),
        value: file.path
      });
      done();
    };
    flat(dir)
      .pipe(wr)
      .on('finish', function() {
        db.batch(ops, fn);
      });
  } else {
    // Low-memory path: transform each file record and pipe it
    // directly into the db; nothing is buffered.
    var tr = Transform({ objectMode: true });
    tr._transform = function(file, _, done) {
      done(null, {
        key: hashFile(file.path, file.stat),
        value: file.path
      });
    };
    flat(dir)
      .pipe(tr)
      .pipe(db.createWriteStream())
      .on('close', fn);
  }

  return merkle;
}
var net = require('net');
var level = require('level');
var merkle = require('level-merkle');
var sub = require('level-sublevel');
var live = require('level-live-stream');
var serialize = require('stream-serializer').json;

// Each replica takes a numeric id from argv; it selects both the db
// directory and the TCP port, so several instances can run side by side.
var id = Number(process.argv[2]);
if (isNaN(id)) throw new Error('id required');
console.log('id: %s', id);

var db = sub(level(__dirname + '/db-' + id));
var master = merkle(db, 'repl');

// Every chunk typed on stdin becomes a timestamped record.
// Use /\n/g (not /\n/) so a multi-line paste has ALL newlines stripped,
// not just the first one.
process.stdin.on('data', function(buf) {
  db.put(Date.now(), buf.toString().replace(/\n/g, ''));
});

// this shouldn't be necessary
var opts = { end: '\xff' };

// Print every record — existing and future — as it lands in the db.
live(db, opts).on('data', function(kv) {
  console.log('%s: %s', kv.key, kv.value);
});

// Serve the merkle replication stream over TCP: the duplex sync stream
// is piped both ways through a JSON serializer for each peer.
net.createServer(function(con) {
  // should serialize itself
  con.pipe(serialize(master.createStream())).pipe(con);
}).listen(8000 + id);