// Round-trip test: two multibuffer-packed rows piped through a write
// stream must come back out of a read stream with both rows' cell
// values intact and in order.
test('piping multiple rows of buff data with write stream', function(t) {
  var firstRow = buff.pack([bops.from('1'), bops.from('2')])
  var secondRow = buff.pack([bops.from('3'), bops.from('4')])
  common.getDat(t, function(dat, done) {
    var writeStream = dat.createWriteStream({ columns: ['a', 'b'] })
    writeStream.on('close', function() {
      // write side finished — read everything back and compare cell by cell
      dat.createReadStream().pipe(concat(function(rows) {
        t.equal(rows.length, 2)
        t.equal(rows[0].a, '1')
        t.equal(rows[0].b, '2')
        t.equal(rows[1].a, '3')
        t.equal(rows[1].b, '4')
        done()
      }))
    })
    var packer = mbstream.packStream()
    packer.pipe(writeStream)
    packer.write(firstRow)
    packer.write(secondRow)
    packer.end()
  })
})
common.getDat(t, function(dat, done) {
  var writeStream = dat.createWriteStream({ columns: ['num'], primary: 'num' })
  var nums = []
  writeStream.on('close', function() {
    // compare the stored order against the lexicographically sorted input
    dat.createReadStream().pipe(concat(function(rows) {
      var results = rows.map(function(row) { return row.num + '\xff' })
      t.equals(JSON.stringify(nums), JSON.stringify(results), 'order matches')
      done()
    }))
  })
  var packer = mbstream.packStream()
  packer.pipe(writeStream)
  // write 1000 single-cell rows, each cell holding a stringified index;
  // the '\xff' suffix mirrors the key terminator used on the read side
  var i = 0
  while (i < 1000) {
    packer.write(buff.pack([bops.from(String(i))]))
    nums.push(i + '\xff')
    i++
  }
  // keys come back in lexicographic (not numeric) order, so sort the
  // expected list the same way before comparing
  nums.sort()
  packer.end()
})
// Through-stream handler: split an incoming csv row into cells, run each
// cell through csv.cell (the exact transform is defined by the csv
// helper), then emit the multibuffer-packed result downstream.
function write(row) {
  var cells = csv.line(row)
  cells = cells.map(function(cell) { return csv.cell(cell) })
  this.queue(multibuffer.pack(cells))
}
// Through-stream handler: parse a csv line into cells, transform each
// cell with csv.cell, hand the cell array to the optional onRow observer,
// then emit the multibuffer-packed row downstream.
function write(buf) {
  var cells = csv.line(buf)
  cells = cells.map(function(cell) { return csv.cell(cell) })
  if (onRow) onRow(cells)
  this.queue(multibuffer.pack(cells))
}
// Serialize a row object into a single multibuffer.
// obj     - row whose values are packed, one buffer per column
// headers - optional array of keys to read (and their order); defaults
//           to Object.keys(obj)
// Returns the multibuffer produced by packing each value's buffer.
function encode(obj, headers) {
  var keys = headers || Object.keys(obj)
  var vals = []
  for (var i = 0; i < keys.length; i++) {
    var key = keys[i]
    var val = obj[key]
    // Objects — arrays included, since typeof [] === 'object' — are
    // serialized as JSON. (The old `|| val instanceof Array` arm was
    // redundant and has been removed; behavior is unchanged.)
    if (typeof val === 'object') val = JSON.stringify(val)
    // Numbers (and numeric strings) are coerced to strings so bops can
    // encode them; non-numeric strings pass through untouched.
    // NOTE(review): a missing key leaves val === undefined, which would
    // make bops.from throw — confirm headers always match obj's keys.
    vals.push(bops.from(isFinite(val) ? val + "" : val))
  }
  return multibuffer.pack(vals)
}
getDat(t, function(dat, done) {
  // store one single-cell row, then read it back and verify the value
  var packed = buff.pack([bops.from('bar')])
  dat.put(packed, { columns: ['foo'] }, function(err) {
    if (err) throw err
    dat.createReadStream().pipe(concat(function(rows) {
      t.equal(rows.length, 1)
      t.equal(rows[0].foo, "bar")
      done()
    }))
  })
})
// Round-trip test: one multibuffer-packed row written through the write
// stream must be readable back with its cell value intact.
test('piping a single row of buff data with write stream', function(t) {
  var packedRow = buff.pack([bops.from('bar')])
  common.getDat(t, function(dat, done) {
    var writeStream = dat.createWriteStream({ columns: ['foo'] })
    writeStream.on('close', function() {
      // write side finished — read the store back and check the one row
      dat.createReadStream().pipe(concat(function(rows) {
        t.equal(rows.length, 1)
        t.equal(rows[0].foo, 'bar')
        done()
      }))
    })
    var packer = mbstream.packStream()
    packer.pipe(writeStream)
    packer.write(packedRow)
    packer.end()
  })
})