// Apply a batch of mutations to rows in the table via the Bigtable
// `mutateRows` streaming RPC.
//
// Dual-mode API: with a callback, per-entry failures are collected into
// an array and passed to the callback; without one, an EventEmitter is
// returned that emits 'error' for each failed entry and 'complete' when
// the underlying stream finishes.
Table.prototype.mutate = function(entries, callback) {
  // Accept a single entry, an array, or nested arrays.
  entries = flatten(arrify(entries));

  var grpcOpts = {
    service: 'Bigtable',
    method: 'mutateRows'
  };

  // NOTE(review): `objectMode` sits inside the request options —
  // presumably consumed by requestStream rather than sent on the wire;
  // confirm against requestStream's contract.
  var reqOpts = {
    objectMode: true,
    tableName: this.id,
    entries: entries.map(Mutation.parse)
  };

  var isCallbackMode = is.function(callback);
  var emitter = null;

  if (!isCallbackMode) {
    emitter = new events.EventEmitter();
  }

  var stream = pumpify.obj([
    this.requestStream(grpcOpts, reqOpts),
    through.obj(function(data, enc, next) {
      var throughStream = this;

      data.entries.forEach(function(entry) {
        // mutation was successful, no need to notify the user
        if (entry.status.code === 0) {
          return;
        }

        // Attach the original entry so the user can tell which
        // mutation the failure belongs to.
        var status = common.GrpcService.decorateStatus_(entry.status);
        status.entry = entries[entry.index];

        if (!isCallbackMode) {
          emitter.emit('error', status);
          return;
        }

        throughStream.push(status);
      });

      next();
    })
  ]);

  if (!isCallbackMode) {
    stream.on('error', emitter.emit.bind(emitter, 'error'));
    stream.on('finish', emitter.emit.bind(emitter, 'complete'));
    return emitter;
  }

  stream
    .on('error', callback)
    .pipe(concat(function(mutationErrors) {
      callback(null, mutationErrors);
    }));
};
// Open a websocket back to the serving host (ws:// when the page was
// served over plain http, wss:// otherwise) and wire up the voting UI.
// Reconnects (at most once per drop) after a 1s delay on error/close.
function connectWebsocket () {
  console.log('connecting')
  var loc = window.location;
  // Stored on window so reconnects and other code can reach it.
  window.awebsocket = websocket((loc.protocol === 'http:' ? 'ws://' : 'wss://') + loc.host);
  // Duplex pipeline: JSON-encode outgoing writes, JSON-decode incoming data.
  var test = pumpify.obj(json.stringify(), awebsocket, json.parse());
  test.on('data', function (data) {
    $("#current").html(data);
    var votes = "";
    $.each(data.votes, function(key, value) {
      votes += "<div class=\"person clearfix\"><div class=\"left\">"+key+"</div><div class=\"right\">"+value+"</div></div>";
    });
    $("#connected").html(Object.keys(data.votes).length);
    $("#votes").html(votes);
  });
  // `once` guards against scheduling two reconnects when both 'error'
  // and 'close' fire for the same connection drop.
  var reconnect = once(function() { setTimeout(connectWebsocket, 1000) })
  test.on("error", reconnect);
  test.on('close', reconnect);
  // First message identifies this client by the stored username.
  test.write(localStorage.getItem("user"));
  window.stream = test;
}
// Handle a new websocket connection: wrap it in a JSON duplex stream,
// require the client to identify itself, and drop clients that go
// silent (no data and no ping) for 5 seconds.
wss.on('connection', function(ws) {
  ws = pumpify.obj(json.stringify(), websocket(ws), json.parse())
  ws.vote = null
  var destroy = function() { ws.destroy() }
  // Idle timeout: reset on every message, destroys the socket on expiry.
  var timeout = setTimeout(destroy, 5000)
  // First message is the username; only then is the client tracked.
  ws.once('data', function(username) {
    ws.username = username
    connected.push(ws)
    // Remove from the connected list when the stream ends for any reason.
    eos(ws, function() {
      if (connected.indexOf(ws) > -1) connected.splice(connected.indexOf(ws), 1)
    })
    ws.on('data', function(data) {
      clearTimeout(timeout)
      timeout = setTimeout(destroy, 5000)
      // 'ping' is keepalive only; anything else is the client's vote.
      if (data === 'ping') return
      ws.vote = data
    })
  })
})
value: function uploadImage(endpoint, file, options) { var uploadOpts = {}; if (options && options.forceJPEG === false) { uploadOpts.queryString = { force_jpeg: false }; // eslint-disable-line camelcase } return pumpify.obj(this.upload(endpoint, file, uploadOpts), this.waitFor(options.waitFor), normalizeProgress()); }
// Stream every stored version of `key` (optionally within a subset) as
// {key, version, value} objects, in stored key order.
LevelDat.prototype.createVersionStream = function(key, opts) {
  // If the database is corked, defer until it becomes writable again.
  if (this.corked) return this._wait(this.createVersionStream, arguments, true)
  opts = this._mixin(opts)

  // Versions live under PREFIX_DATA+subset+SEP+key+SEP+packedVersion,
  // so this range scan covers exactly the versions of one key.
  var prefix = PREFIX_DATA+(opts.subset || '')+SEP
  opts.start = prefix+key+SEP
  opts.end = prefix+key+SEP+SEP

  var stream = through.obj(function(data, enc, cb) {
    // The packed version number sits after the last separator.
    var vidx = data.key.lastIndexOf(SEP)
    data = {
      key: data.key.slice(prefix.length, vidx),
      version: unpack(data.key.slice(vidx+1)),
      value: data.value
    }
    debug('get version (key: %s, version: %d)', data.key, data.version)
    cb(null, data)
  })

  var rs = this.db.createReadStream(opts)
  return pumpify.obj(rs, stream)
}
/**
 * Fetch sample row keys for this table via the `sampleRowKeys` RPC.
 *
 * Without a callback, returns a stream of {key, offset} objects; with
 * one, collects every sample and invokes the callback with the list.
 */
Table.prototype.sampleRowKeys = function(callback) {
  var keyStream = pumpify.obj([
    this.requestStream(
      { service: 'Bigtable', method: 'sampleRowKeys' },
      { tableName: this.id, objectMode: true }
    ),
    through.obj(function(key, enc, next) {
      var sample = { key: key.rowKey, offset: key.offsetBytes };
      next(null, sample);
    })
  ]);

  if (is.function(callback)) {
    keyStream
      .on('error', callback)
      .pipe(concat(function(keys) {
        callback(null, keys);
      }));
    return;
  }

  return keyStream;
};
// Build the result-parsing tail of the pipeline for a given response
// format: 'xml' responses are parsed into JS objects, 'fasta' bodies
// are forwarded raw into the FASTA object parser, anything else is
// wrapped as {result: body}.
function parseResult (resFmt) {
  var lastStream = (resFmt === 'fasta') ? fasta.obj : through.obj
  var stream = pumpify.obj(
    requestStream('true'),
    preProcess(),
    lastStream()
  )
  return stream

  function preProcess () {
    var stream = through.obj(transform)
    return stream
    function transform (chunk, enc, cb) {
      var self = this
      if (resFmt === 'xml') {
        xml2js(chunk.body, function (err, data) {
          // Surface parse failures as stream errors but keep the
          // pipeline alive for subsequent chunks.
          if (err) { self.emit('error', err); return cb() }
          self.push(data)
          cb()
        })
      } else if (resFmt === 'fasta') {
        self.push(chunk.body)
        cb()
      } else {
        self.push({result: chunk.body})
        cb()
      }
    }
  }
}
module.exports = function blockTree (filepath, callback) { var pipeline = pumpify.obj( parser(), filter.obj(function (line) { return line.type === 'block'; }), map.obj(function (block) { return block.content; }) ); if (filepath) { callback = callback || noop; streamFile(filepath) .pipe(pipeline) .pipe(concat({object: true}, function (blocks) { callback(null, blocks); })) .on('error', callback); } return pipeline; };
// Build sitemap streams for the blogs section: the index page, one URL
// per blog owner, and one URL per visible blog entry.
N.wire.on(apiPath, async function get_blogs_sitemap(data) {
  let buffer = [];

  buffer.push({ loc: N.router.linkTo('blogs.index', {}), lastmod: new Date() });

  // Only users who actually have blog entries get a sole-blog URL.
  let user_ids = await N.models.blogs.BlogEntry.distinct('user');

  let users = await N.models.users.User.find()
    .where('_id').in(user_ids)
    .select('hid')
    .sort('hid')
    .lean(true);

  for (let user of users) {
    buffer.push({ loc: N.router.linkTo('blogs.sole', { user_hid: user.hid }) });
  }

  // Map user _id -> hid for URL generation, then release the user list.
  let user_id_to_hid = users.reduce((acc, user) => {
    acc[user._id] = user.hid;
    return acc;
  }, {});

  users = null;

  // Stream entries straight from a mongoose cursor to keep memory flat.
  let entry_stream = pumpify.obj(
    N.models.blogs.BlogEntry.find()
      .where('st').equals(N.models.blogs.BlogEntry.statuses.VISIBLE)
      .select('hid user')
      .sort('hid')
      .lean(true)
      .cursor(),
    through2.obj(function (entry, encoding, callback) {
      let hid = user_id_to_hid[entry.user];
      // Skip entries whose owner wasn't resolved above.
      if (hid) {
        this.push({
          loc: N.router.linkTo('blogs.entry', {
            user_hid: hid,
            entry_hid: entry.hid
          })
        });
      }
      callback();
    })
  );

  data.streams.push({
    name: 'blogs',
    stream: multi.obj([ from2.obj(buffer), entry_stream ])
  });
});
// Read rows from storage, decoding each one; optionally serialize the
// output as CSV or newline-delimited JSON depending on `opts`.
dat.createReadStream = function(opts) {
  opts = opts || {}

  var stages = [this.storage.createReadStream(opts), decoder(this)]

  var wantsCsv = opts.csv || opts.format === 'csv'
  var wantsJson = opts.json || opts.format === 'json'

  if (wantsCsv) {
    stages.push(csvWriter({headers: this.headers()}))
  } else if (wantsJson) {
    stages.push(ldj.serialize())
  }

  return pumpify.obj(stages)
}
// Success: hand back a gzip-compressed sitemap pipeline writing into
// temporary file storage; failure: forward the error to the callback.
.then(() => {
  callback(null, pumpify.obj(
    SiteMapFile(),
    zlib.createGzip(),
    N.models.core.FileTmp.createWriteStream({
      filename,
      contentType: 'application/x-gzip'
    })
  ));
}, err => callback(err));
// Produce the next page of GitHub events as an object stream, or signal
// completion once four pages have been fetched.
function next(callback, previousStream) {
  if (page >= 4) {
    return callback()
  }

  page += 1

  var eventsRequest = request({
    url: 'https://api.github.com/repos/joyent/node/events?page=' + page,
    headers: {'user-agent': 'pug'}
  })

  callback(null, pumpify.obj(eventsRequest, JSONStream.parse('*')))
}
// Create a duplex stream for streaming speech recognition. Written
// audio chunks are wrapped into streamingRecognize requests; responses
// flow back out, with server-side errors surfaced as stream errors.
methods.streamingRecognize = function(streamingConfig, options) {
  options = options || {};
  streamingConfig = streamingConfig || {};

  // Format the audio content as input request for pipeline
  const recognizeStream = streamEvents(pumpify.obj());

  const requestStream = this._innerApiCalls
    .streamingRecognize(options)
    .on('error', err => {
      recognizeStream.destroy(err);
    })
    .on('response', response => {
      recognizeStream.emit('response', response);
    });

  // Attach the events to the request stream, but only do so
  // when the first write (of data) comes in.
  //
  // This also means that the sending of the initial request (with the
  // config) is delayed until we get the first burst of data.
  recognizeStream.once('writing', () => {
    // The first message should contain the streaming config.
    requestStream.write({streamingConfig});

    // Set up appropriate piping between the stream returned by
    // the underlying API method and the one that we return.
    recognizeStream.setPipeline([
      // Format the user's input.
      // This entails that the user sends raw audio; it is wrapped in
      // the appropriate request structure.
      through.obj((audioContent, _, next) => {
        if (audioContent !== undefined) {
          next(null, {audioContent});
          return;
        }
        next();
      }),
      requestStream,
      through.obj((response, enc, next) => {
        // Convert in-band errors into real stream errors.
        if (response.error) {
          next(new common.util.ApiError(response.error));
          return;
        }
        next(null, response);
      }),
    ]);
  });

  return recognizeStream;
};
// Look up cross-database links for a record via NCBI elink.
// Accepts positional arguments or a single options object; returns the
// pipeline unless a callback is supplied, in which case each link
// object is delivered to `cb` as it arrives.
ncbi.link = function (srcDB, destDB, srcUID, cb) {
  insight.track('ncbi', 'link')

  var opts
  if (typeof srcDB === 'string') {
    opts = { srcDB, destDB, srcUID }
  } else {
    opts = srcDB
  }

  var linkStream = pumpify.obj(
    createAPILinkURL(opts.srcDB, opts.destDB),
    requestStream(true),
    createLinkObj()
  )

  if (opts.srcUID) {
    linkStream.write(opts.srcUID)
    linkStream.end()
  }

  if (!cb) return linkStream
  linkStream.on('data', cb)
}
// Stream the current (latest) value of every key, optionally restricted
// to a subset and/or key range. Each row is emitted as
// {key, version, value} unless keys or values are disabled via opts.
LevelDat.prototype.createReadStream = function(opts) {
  if (this.corked) return this._wait(this.createReadStream, arguments, true)
  opts = this._mixin(opts)

  var self = this
  var keys = opts.keys !== false
  var values = opts.values !== false
  var subset = opts.subset || ''
  var pre = PREFIX_CUR+subset+SEP
  var ropts = {}

  // Translate user-facing range options into the prefixed key space.
  if (opts.start) ropts.start = pre+(opts.start || '')
  else if (opts.gte) ropts.gte = pre+opts.gte
  else ropts.gt = pre+(opts.gt || '')

  if (opts.end) ropts.end = pre+opts.end
  else if (opts.lte) ropts.lte = pre+opts.lte
  else ropts.lt = pre+(opts.lt || SEP)

  if (opts.reverse) ropts.reverse = true
  if (opts.limit) ropts.limit = opts.limit

  var rs = self.db.createReadStream(fixRange(ropts))

  var get = through.obj(function(data, enc, cb) {
    // The current-pointer row stores the packed version of the latest
    // value; the value itself lives under a PREFIX_DATA key.
    var val = data.value
    var key = data.key.slice(PREFIX_CUR.length)
    var subset = key.slice(0, key.indexOf(SEP))

    key = key.slice(subset.length+1)

    // Tombstoned rows are skipped entirely.
    if (deleted(val)) return cb()

    var version = unpack(val)

    self.mutex.get(PREFIX_DATA+subset+SEP+key+SEP+val, opts, function(err, data) {
      if (err) return cb(err)
      debug('get data.%s (version: %d)', key, version)
      if (values && !keys) return cb(null, data)
      if (keys && !values) return cb(null, key)
      cb(null, {
        key: key,
        version: version,
        value: data
      })
    })
  })

  return pumpify.obj(rs, get)
}
// Download NCBI data files for a database/term pair.
//
// Accepts (db, term[, cb]) or a single options object as the first
// argument. Returns the pipeline when no callback is given; otherwise
// the concatenated results are passed to `cb`.
ncbi.download = function (db, term, cb) {
  insight.track('ncbi', 'download')

  var opts = typeof db === 'string' ? { db: db, term } : db
  // Fix: removed the no-op self-assignment `opts.db = opts.db` that was
  // dead code in the original.

  var stream = pumpify.obj(
    ncbi.urls(opts.db),
    download(opts)
  )

  if (opts.term) { stream.write(opts.term); stream.end() }
  if (cb) { stream.pipe(concat(cb)) } else { return stream }
}
// Build the per-database stream used to fetch and normalize esummary
// results for a list of IDs.
function fetchByID (db) {
  // NOTE(review): when `db` has no XMLPROPERTIES entry the fallback is
  // an *invoked* through-stream object rather than a property list —
  // compare LASTSTREAM's fallback, which is the uncalled factory.
  // Verify XMLToJSProperties tolerates this default.
  var xmlProperties = XMLPROPERTIES[db] || through.obj()
  var lastStream = LASTSTREAM[db] || through.obj
  var stream = pumpify.obj(
    requestStream(true),
    tool.extractProperty('body.result'),
    tool.deleteProperty('uids'),
    tool.arraySplit(),
    tool.XMLToJSProperties(xmlProperties),
    lastStream()
  )
  return stream
}
// Transform newline-delimited JSON sequence records ({id, seq}) into
// FASTA-formatted text, wrapping sequences at 80 characters per line.
module.exports.write = function () {
  var stream = through.obj(transform)
  return pumpify.obj(split(), stream)

  function transform (obj, enc, next) {
    try {
      obj = JSON.parse(obj)
      // NOTE(review): `seq.match(...)` returns null for an empty seq,
      // so such records throw here and are silently skipped along with
      // any unparseable lines (deliberate best-effort behavior).
      var fastaString = '>' + obj.id + '\n' + obj.seq.match(/.{1,80}/g).join('\n') + '\n'
      this.push(fastaString)
    } catch (e) {}
    next()
  }
}
// Resolve NCBI download URLs for a database/term pair.
//
// Accepts (db, term[, cb]) or a single options object plus an optional
// callback. Returns the pipeline when no callback is given; otherwise
// the concatenated results are passed to `cb`.
ncbi.urls = function (db, term, cb) {
  insight.track('ncbi', 'urls')

  var opts = typeof db === 'string' ? { db } : db
  cb = typeof term === 'function' ? term : cb

  // These "databases" are really file types extracted from assemblies.
  var extractFiles = ['gff', 'gpff', 'fasta', 'fna', 'faa', 'repeats']
  if (extractFiles.indexOf(db) !== -1) { opts.db = 'assembly' }

  var stream = pumpify.obj(
    ncbi.search(opts),
    createFTPURL(opts.db)
  )

  // Fix: the original `if (term)` also matched the (opts, cb) call
  // form and wrote the callback *function* into the stream. Only write
  // an actual term value (opts.term is handled inside ncbi.search).
  if (term && typeof term !== 'function') { stream.write(term); stream.end() }

  if (cb) { stream.pipe(concat(cb)) } else { return stream }
}
// Parse FASTA input supplied as a filename, raw content, or piped data.
// Supports gzipped files, object-mode output, and optionally including
// the source path on each record. Flexible arguments are resolved by
// paramsParser; with a callback, all records are collected and returned
// at once.
function fasta (arg1, arg2, arg3) {
  var params = paramsParser(arg1, arg2, arg3)

  // Parser used when raw content is written directly into the stream.
  var contentParser
  if (params.options.objectMode) {
    contentParser = pumpify.obj(fastaParser(), jsParse())
  } else {
    contentParser = fastaParser()
  }

  // Parser used when filenames are written in: each file gets its own
  // sub-pipeline (gunzip if *.gz, parse, optionally annotate with the
  // path, optionally convert to objects).
  var filesParser = through.obj(transform)
  function transform (obj, enc, next) {
    var self = this
    var unzip = obj.split('.').pop() === 'gz' ? zlib.Gunzip() : through()
    var includepath = params.options.includePath ? includePath(obj) : through()
    var jsparse, pumpit
    if (params.options.objectMode) {
      jsparse = jsParse()
      pumpit = pumpify.obj
    } else {
      jsparse = through()
      pumpit = pumpify
    }
    var pipeline = pumpit(
      fs.createReadStream(obj),
      unzip,
      fastaParser(),
      includepath,
      jsparse
    )
    // Forward the sub-pipeline's output and errors into the outer stream.
    pipeline
      .on('error', function (error) { self.emit('error', error) })
      .on('data', function (data) { self.push(data) })
      .on('end', function () { self.push(null) })
    next()
  }

  var stream = params.filename || params.options.filenameMode ? filesParser : contentParser

  if (params.filename) { stream.write(params.filename) }

  if (params.callback) {
    stream.on('error', params.callback)
    stream.pipe(concat(function (data) { params.callback(null, data) }))
  }

  return stream
}
// Fetch full records from an NCBI database via efetch.
// Accepts (db, term[, cb]) or a single options object; the return
// format (rettype/retmode) defaults per database unless overridden.
ncbi.fetch = function (db, term, cb) {
  insight.track('ncbi', 'fetch')

  var opts = typeof db === 'string' ? { db: db, term: term } : db
  cb = typeof term === 'function' ? term : cb

  // Default efetch rettype per database.
  var rettypes = {
    bioproject: 'xml',
    biosample: 'full',
    biosystems: 'xml',
    gds: 'summary',
    gene: '',
    homologene: 'fasta',
    mesh: 'full',
    nlmcatalog: 'xml',
    nuccore: 'fasta',
    nucest: 'fasta',
    nucgss: 'fasta',
    protein: 'fasta',
    popset: 'fasta',
    pmc: '',
    pubmed: '',
    snp: 'fasta',
    sra: 'full',
    taxonomy: ''
  }

  // Map rettype -> retmode (how the response body should be parsed).
  var retmodes = {
    fasta: 'fasta',
    'native': 'xml',
    full: 'xml',
    xml: 'xml',
    '': 'xml',
    'asn.1': 'asn.1'
  }

  opts.rettype = opts.rettype || rettypes[opts.db]
  opts.retmode = retmodes[opts.rettype] || 'text'

  // search -> paginate -> fetch -> parse, as one duplex pipeline.
  var stream = pumpify.obj(
    createAPISearchUrl(opts.db, opts.term),
    requestStream(true),
    createAPIPaginateURL(opts),
    requestStream(true),
    createAPIFetchUrl(opts, stringifyExtras(opts)),
    parseResult(opts.retmode)
  )

  if (opts.term) { stream.write(opts.term); stream.end() }
  if (cb) { stream.pipe(concat(cb)) } else { return stream }
}
// Create a duplex stream for streaming speech recognition: written
// audio chunks are wrapped into streamingRecognize requests and
// recognition results (formatted via Speech.formatResults_) flow out.
Speech.prototype.createRecognizeStream = function(config) {
  var self = this;

  var verboseMode = config.verbose === true;
  delete config.verbose;

  // `timeout` (seconds) is translated into a gax call option (ms) and
  // removed so it is not sent as part of the streaming config.
  var gaxOptions = {};

  if (is.number(config.timeout)) {
    gaxOptions.timeout = config.timeout * 1000;
    delete config.timeout;
  }

  var recognizeStream = streamEvents(pumpify.obj());

  // The pipeline is only assembled on the first write so the initial
  // config request is sent right before the first audio chunk.
  recognizeStream.once('writing', function() {
    var requestStream = self.api.Speech.streamingRecognize(gaxOptions);

    requestStream.on('response', function(response) {
      recognizeStream.emit('response', response);
    });

    requestStream.write({
      streamingConfig: config
    });

    this.setPipeline([
      // Format the user's input.
      through.obj(function(obj, _, next) {
        next(null, {
          audioContent: obj
        });
      }),
      requestStream,
      // Format the results.
      through.obj(function(obj, _, next) {
        obj.results = Speech.formatResults_(obj.results, verboseMode);
        next(null, obj);
      })
    ]);
  });

  return recognizeStream;
};
value: function upload(endpoint, file, options) { var formData = new window.FormData(); formData.append('file', file); var req = this.stream().post(endpoint, null, options); req.xhr.upload.addEventListener('progress', function (progressEvent) { var percent = progressEvent.lengthComputable ? Math.ceil(progressEvent.loaded / progressEvent.total * 100) : -1; req.push('{"percent": ' + percent + ',"status": "uploading"}\n'); }); req.end(formData); return pumpify.obj(req, split('\n'), filter(function (line) { return line && line.trim().length > 0; }), parseJSON(), checkError()); }
// Create a write stream that applies replicated change records in
// order. Changes are validated, batched by size/time, and written; a
// change whose sequence number is not exactly self.change+1 aborts the
// stream with a conflict error.
LevelDat.prototype.createChangesWriteStream = function(opts) {
  if (this.corked) return this._wait(this.createChangesWriteStream, arguments, true)
  opts = this._mixin(opts)

  var self = this

  // Batch incoming changes by byte size with a 3s flush window.
  var buffer = byteStream({
    limit: WRITE_BUFFER_SIZE,
    time: 3000
  })

  // Validate each change and annotate it with a length for the batcher.
  var format = through.obj(function(data, enc, cb) {
    // `to === 0` marks a delete, which legitimately has no value.
    if (!data.value && data.to !== 0) return cb(new Error('data.value is required'))
    data.length = data.value ? data.value.length || 1 : 1
    cb(null, data)
  })

  var ws = through.obj(function(batch, enc, cb) {
    var wait = waiter(batch.length, cb)
    for (var i = 0; i < batch.length; i++) {
      var b = batch[i]
      var subset = b.subset || ''
      debug('put change (change: %d, key: %s, to: %s, from: %s)', b.change, b.key, b.to, b.from)
      // Changes must be applied strictly in sequence.
      if (b.change !== self.change+1) return cb(changeConflict())
      self.change = b.change
      if (b.to === 0) {
        // Delete: record the change and write a tombstone pointer.
        self._change(b.change, b.key, b.from, 0, subset, null)
        self.mutex.put(PREFIX_CUR+subset+SEP+b.key, pack(b.from)+SEP+'1', wait)
      } else {
        // Update: record the change, advance the current pointer and
        // store the versioned value.
        var v = pack(b.to)
        self._change(b.change, b.key, b.from, b.to, subset, b.value)
        self.mutex.put(PREFIX_CUR+subset+SEP+b.key, v, noop)
        self.mutex.put(PREFIX_DATA+subset+SEP+b.key+SEP+v, b.value, opts, wait)
      }
    }
  })

  return pumpify.obj(format, buffer, ws)
}
// Instantiate each configured reporter as a pipeline of streams. A spec
// entry may be an already-created stream, the name of a process stream
// ('stderr'/'stdout'), or a {module, name, args} constructor spec.
internals.forOwn(this.settings.reporters, (reporterName, streamsSpec) => {
  const streamObjs = [];

  for (let i = 0; i < streamsSpec.length; ++i) {
    const spec = streamsSpec[i];

    // Already created stream
    if (typeof spec.pipe === 'function') {
      streamObjs.push(spec);
      continue;
    }

    // if this is stderr or stdout
    if (process[spec]) {
      streamObjs.push(process[spec]);
      continue;
    }

    let Ctor = require(spec.module);
    Ctor = spec.name ? Ctor[spec.name] : Ctor;
    Hoek.assert(typeof Ctor === 'function', `Error in ${reporterName}. ${spec.module} must be a constructor function.`);

    // Pre-bind the constructor args so `new Ctor()` applies them.
    const ctorArgs = spec.args ? spec.args.slice() : [];
    ctorArgs.unshift(null);
    Ctor = Ctor.bind.apply(Ctor, ctorArgs);

    const stream = new Ctor();
    Hoek.assert(typeof stream.pipe === 'function', `Error in ${reporterName}. ${spec.module} must create a stream that has a pipe function.`);
    streamObjs.push(stream);
  }

  // A pipeline needs at least two streams; pad with a pass-through.
  if (streamObjs.length === 1) {
    streamObjs.unshift(new Utils.NoOp());
  }

  this._reporters[reporterName] = Pumpify.obj(streamObjs);
  this._reporters[reporterName].on('error', () => {
    console.error(`There was a problem in ${reporterName} and it has been destroyed.`);
  });
});
// Search an NCBI database for records matching a term.
// Accepts (db, term[, cb]) or a single options object; throws for an
// unknown database name. Returns the pipeline unless a callback is
// given, in which case all results are concatenated and passed to it.
ncbi.search = function (db, term, cb) {
  insight.track('ncbi', 'search')

  var opts = typeof db === 'string' ? { db, term } : db
  if (typeof term === 'function') cb = term

  var knownDbs = Object.keys(validDbs.dbs)
  if (knownDbs.indexOf(opts.db) < 0) {
    throw new InvalidDbError('The database "' + opts.db + '" is not a valid ncbi database')
  }

  var searchStream = pumpify.obj(
    createAPISearchUrl(opts.db, opts.term),
    requestStream(true),
    createAPIPaginateURL(opts),
    requestStream(true),
    createAPIDataUrl(),
    fetchByID(opts.db)
  )

  if (opts.term) {
    searchStream.write(opts.term)
    searchStream.end()
  }

  if (!cb) return searchStream
  searchStream.pipe(concat(cb))
}
// Return a stream of {key, offset} row-key samples for this table,
// fetched via the `sampleRowKeys` RPC.
Table.prototype.sampleRowKeysStream = function() {
  var requestOpts = {
    tableName: this.id,
    objectMode: true
  };

  var toSample = through.obj(function(key, enc, next) {
    next(null, {
      key: key.rowKey,
      offset: key.offsetBytes
    });
  });

  return pumpify.obj([
    this.requestStream({ service: 'Bigtable', method: 'sampleRowKeys' }, requestOpts),
    toSample
  ]);
};
// Register a sitemap stream listing every existing user's profile page,
// with the user's last activity as the lastmod date.
N.wire.on(apiPath, function get_users_sitemap(data) {
  let userCursor = N.models.users.User.collection
    .find({ exists: true }, { hid: 1, last_active_ts: 1 })
    .sort({ hid: 1 })
    .stream();

  let toSitemapEntry = through2.obj(function (user, encoding, callback) {
    this.push({
      loc: N.router.linkTo('users.member', { user_hid: user.hid }),
      lastmod: user.last_active_ts
    });
    callback();
  });

  data.streams.push({ name: 'users', stream: pumpify.obj(userCursor, toSitemapEntry) });
});
// Create a stream producing `count` rows of fake CSV data, one column
// per header spec ({name, type, method, args} addressing a faker
// generator).
module.exports = function randoCsv (headers, count, options) {
  const writer = csvWriter(options)
  const csvHeaders = []
  const types = {}
  const methods = {}
  const args = {}

  // Index the header specs by column name for per-cell lookup below.
  headers.forEach((header) => {
    csvHeaders.push(header.name)
    types[header.name] = header.type
    methods[header.name] = header.method
    args[header.name] = header.args
  })

  let row = {}
  let headersCount = csvHeaders.length

  // Receives one header name per chunk; once every column of the
  // current row has been filled, the completed row is emitted.
  // NOTE(review): `row` is reused (never re-created) between rows, so
  // every pushed row is the same object — downstream must consume each
  // row before the next is built; verify csvWriter does so.
  const ts = through.obj((chunk, enc, cb) => {
    const type = types[chunk]
    const method = methods[chunk]
    const arg = args[chunk]
    row[chunk] = faker[type][method](arg)
    headersCount -= 1
    if (headersCount === 0) {
      ts.push(row)
      headersCount = csvHeaders.length
    }
    cb()
  })

  // Source: emits the header-name list once per requested row, ending
  // the stream (null) when `count` is exhausted.
  const ss = nes.obj(() => {
    if (count === 0) return null
    count -= 1
    return from.obj(csvHeaders)
  })

  return pumpify.obj(ss, ts, writer)
}
// Stream docker daemon events as parsed objects. With opts.name, each
// event is annotated with the container's name (looked up once per id
// and cached until the container is destroyed).
that.events = function(opts) {
  if (!opts) opts = {}

  var events = pumpify.obj()
  var names = {}

  // Resolve (and memoize) a container id to its name; lookup failures
  // yield null rather than an error.
  var lookup = function(id, cb) {
    if (names[id]) return cb(names[id])
    that.inspect(id, function(err, data) {
      if (err) return cb(null)
      cb(names[id] = data.name)
    })
  }

  var map = through.obj(function(data, enc, cb) {
    var onname = function(name) {
      // Drop the cache entry once the container is gone.
      if (data.status === 'destroy') delete names[data.id]
      cb(null, {
        status: data.status,
        id: data.id.slice(0, 12),
        name: name,
        image: encodeImage(data.from).replace('@latest', ''),
        time: new Date(data.time * 1000)
      })
    }
    if (opts.name) lookup(data.id, onname)
    else onname(null)
  })

  // The pipeline is only completed once the HTTP response arrives.
  request.get('/events', {agent: false}, function(err, response) {
    if (err) return events.destroy(err)
    events.setPipeline(response, parse(), map)
  })

  return events
}