/**
 * Readable stream that yields the string form of a single value.
 * @constructor
 * @param {*} data - value to emit; stringified, or null when falsy
 * @param {Object} [opt] - Readable options; `opt.size` overrides the chunk size
 */
function StreamVariable(data, opt) {
  stream.Readable.call(this, opt);
  // Keep a stringified copy of the payload (null means nothing to emit).
  this.data = data ? data.toString() : null;
  // Emit in chunks of opt.size bytes; default 4 KiB.
  this.chunkSize = (opt && opt.size) ? opt.size : 4096;
}
/** @module types */

/**
 * Readable stream used to yield data from a result or a field.
 * @constructor
 * @param {Object} [opt] - options forwarded to stream.Readable
 */
function ResultStream(opt) {
  stream.Readable.call(this, opt);
  // Chunks queued until a consumer resumes the stream.
  this.buffer = [];
  // Start paused; presumably unpaused elsewhere when a consumer reads.
  this.paused = true;
}
/**
 * Wrap a string in a Readable stream and pipe it through a ParserStream.
 * @param {string} string - the raw input to parse
 * @param {Object} [options] - options forwarded to ParserStream
 * @returns {ParserStream} the parser stream being fed with `string`
 */
function fromString(string, options) {
  var rs = new stream.Readable();
  // A Readable constructed without a _read implementation throws
  // ERR_METHOD_NOT_IMPLEMENTED if read() is ever invoked; since all data
  // is pushed up front, a no-op is the correct implementation here.
  rs._read = function noop() {};
  rs.push(string);
  rs.push(null);
  return rs.pipe(new ParserStream(options));
}
// Defer the next push by one macrotask: writer2 is still mid-write, so this
// chunk must *not* reach writer1 until writer2 reports "done".
setImmediate(() => {
  reader.push(buffer);
});
createReadStream(worker) { let stream = new Readable(); stream._read = _.noop; stream.pipe(worker.process.stdin); this.STBridges.push(stream); }
var TestStream = function () { Stream.Readable.call(this, { objectMode: true }); };
var Readable = require('stream').Readable; var Speaker = require('../'); // node v0.8.x compat if (!Readable) Readable = require('readable-stream/readable'); // the frequency to play var freq = parseFloat(process.argv[2], 10) || 440.0; // Concert A, default tone // seconds worth of audio data to generate before emitting "end" var duration = parseFloat(process.argv[3], 10) || 2.0; console.log('generating a %dhz sine wave for %d seconds', freq, duration); // A SineWaveGenerator readable stream var sine = new Readable(); sine.bitDepth = 16; sine.channels = 2; sine.sampleRate = 44100; sine.samplesGenerated = 0; sine._read = read; // create a SineWaveGenerator instance and pipe it to the speaker sine.pipe(new Speaker()); // the Readable "_read()" callback function function read (n) { var sampleSize = this.bitDepth / 8; var blockAlign = sampleSize * this.channels; var numSamples = n / blockAlign | 0; var buf = new Buffer(numSamples * blockAlign);
/**
 * Convenient class to convert the process of scanning keys to a readable
 * stream.
 *
 * @constructor
 * @private
 * @param {Object} opt - Readable options, also kept on the instance
 */
function ScanStream(opt) {
  Readable.call(this, opt);
  // Redis SCAN starts from cursor "0".
  this._redisCursor = '0';
  this.opt = opt;
}
/**
 * Readable test double; counter state is presumably consumed by a _read
 * implementation defined elsewhere.
 * @constructor
 * @param {Object} [opt] - options forwarded to Stream.Readable
 */
const TestStream = function (opt) {
  Stream.Readable.call(this, opt);
  this._max = 2;   // upper bound of the counter
  this._index = 1; // next value to produce
};
// Feed every prepared chunk into the stream in order.
for (var i = 0; i < chunks.length; i++) {
  s.push(chunks[i]);
}
/**
 * Wrap a string in an already-ended Readable stream.
 * @param {string} str - content the stream will emit
 * @returns {Readable} a stream that yields `str` followed by EOF
 */
function stringToStream(str) {
  let stream = new Readable();
  // Guard against ERR_METHOD_NOT_IMPLEMENTED: all data is pushed up front,
  // so reads need no backing implementation.
  stream._read = () => {};
  stream.push(str);
  stream.push(null);
  return stream;
}
// Parses a complete ASCII PLY cube (8 vertices, 6 faces) delivered to the
// parser in arbitrarily-split chunks, then checks every emitted element.
it('can parse a stream', function(done) {
  // http://stackoverflow.com/questions/12755997/how-to-create-streams-from-string-in-node-js
  var s = new stream.Readable();
  // No-op _read: data is pushed manually below.
  s._read = function noop() {};
  var p = new Parser(s);
  var elements = [];
  p.parse(s);
  // Record every parsed element in arrival order.
  p.on('element', function(element) { elements.push(element); });
  // Chop up chunks to simulate data not necessarily
  // arriving in lines
  var chunks = [
    'pl',
    'y\nformat ascii 1.0\n',
    'commen',
    't made by anonymous\n',
    'comment this file is a cube\n',
    'element vertex 8\n',
    'prope',
    'rty float32 x\n',
    'property float32 y\n',
    'property float32 z\n',
    'element face 6\n',
    'property list uint8 int32 vertex_index\n',
    'end_header\n',
    '0 0 0\n',
    '0 0 1\n',
    '0 1 1\n',
    '0 1 0\n',
    '1 0 0\n',
    '1 ',
    '0 1\n',
    '1 1 1\n',
    '1 1 0\n',
    '4 ',
    '0 1 2 3\n',
    '4',
    ' 7 6 5 4\n',
    '4 0 4 5 1\n',
    '4 1 ',
    '5 6 2\n',
    '4 2 6 7 3\n',
  ];
  chunks.forEach(function(chunk) { s.push(chunk); });
  // Signal end-of-input.
  s.push(null);
  p.on('done', function() {
    // Expect all 8 cube vertices followed by the 6 faces, minus none —
    // order must match the input file exactly.
    assert.deepEqual(elements, [
      { name: 'vertex', x: 0, y: 0, z: 0 },
      { name: 'vertex', x: 0, y: 0, z: 1 },
      { name: 'vertex', x: 0, y: 1, z: 1 },
      { name: 'vertex', x: 0, y: 1, z: 0 },
      { name: 'vertex', x: 1, y: 0, z: 0 },
      { name: 'vertex', x: 1, y: 0, z: 1 },
      { name: 'vertex', x: 1, y: 1, z: 1 },
      { name: 'vertex', x: 1, y: 1, z: 0 },
      { name: 'face', vertex_index: [ 0, 1, 2, 3 ] },
      { name: 'face', vertex_index: [ 7, 6, 5, 4 ] },
      { name: 'face', vertex_index: [ 0, 4, 5, 1 ] },
      { name: 'face', vertex_index: [ 1, 5, 6, 2 ] },
      { name: 'face', vertex_index: [ 2, 6, 7, 3 ] },
    ]);
    done();
  });
});
/**
 * Readable stream whose _n counter starts at zero; presumably advanced by a
 * _read implementation defined elsewhere.
 * @constructor
 * @param {number} ms - accepted but not stored by this constructor
 */
function CountStream(ms) {
  stream.Readable.call(this);
  this._n = 0; // values produced so far
}
// Stop feeding snapshot data once the client connection goes away.
var onSocketClose = function () {
  snapshots.unpipe(stream)
}
this.req.socket.once('close', onSocketClose)
// Collect each key/value pair as a "key = value" line.
stream.on('data', function (data) {
  var line = data.key + ' = ' + data.value
  result.push(line)
})
/**
 * Constructor
 *
 * An Upstream is an object-mode Readable of incoming file streams. Two
 * timers guard against requests that never send a file, or that are never
 * plugged into a receiver, so they fail fast instead of buffering forever.
 *
 * @param {[type]} opts [description]
 */
function Upstream (opts) {
  var self = this;
  opts = opts || {};
  _.defaults(opts, {
    // highWaterMark: 0,
    objectMode: true,

    // The max # of ms this Upstream will wait without receiving a file
    // before getting frustrated and emitting an error. (This will tell
    // any connected receivers (writestreams) that they ought to just give
    // up themselves. This, in turn, triggers the callback for `req.file().upload()`
    // (no buffering is happening, so it's ok for this to be longer)
    // This needs to be long enough to allow any policies/middleware to run.
    // Should not need to exceed 500ms in most cases.
    maxTimeToWaitForFirstFile: 500,

    // The max # of ms this Upstream will buffer bytes and wait to be plugged
    // into a receiver. highWaterMark isn't quite enough, since we want to
    // allow significant buffering in-memory, but we also want to timeout when
    // the really silly sort of requests come in.
    maxTimeToBuffer: 500
  });

  // Allow `noop` to be passed in to force this Upstream to immediately end.
  if (opts.noop) this.isNoop = true;

  // Keep track of file streams which we've emitted.
  this._files = [];

  // Keep track of timeout timers.
  this.timeouts = {};

  Readable.call(this, opts);

  // Enforce the `maxTimeToWaitForFirstFile` option.
  this.timeouts.untilFirstFileTimer = setTimeout(function () {
    if (self._files.length === 0) {
      var e = new Error();
      e.code = 'ETIMEOUT';
      e.message = e.code + ': '+
      'An Upstream (`'+self.fieldName+'`) timed out waiting for file(s). '+
      'No files were sent after waiting '+opts.maxTimeToWaitForFirstFile+'ms.';
      self.fatalIncomingError(e);
    }
  }, opts.maxTimeToWaitForFirstFile);

  // Enforce the `maxTimeToBuffer` option.
  this.timeouts.untilMaxBufferTimer = setTimeout(function () {
    if ( !self._connected ) {
      var e = new Error();
      e.code = 'EMAXBUFFER';
      e.message = e.code + ': '+
      'An Upstream (`'+self.fieldName+'`) timed out before it was plugged into a receiver. '+
      'It was still unused after waiting '+opts.maxTimeToBuffer+'ms. '+
      'You can configure this timeout by changing the `maxTimeToBuffer` option.';
      self.fatalIncomingError(e);
    }
  }, opts.maxTimeToBuffer);
}
var TestStream = function () { Stream.Readable.call(this); };
var streamWrite = function(chunk, encoding, callback) { if (Buffer.isBuffer(chunk)) { chunk = chunk.toString(encoding); } process.log(chunk); if (callback) { callback(); } return true; }; console.log = console.error = console.warn = consoleLog; process.stdout.write = process.stderr.write = streamWrite; // Always returns EOF for stdin stream. var Readable = require('stream').Readable; var stdin = new Readable; stdin.push(null); process.__defineGetter__('stdin', function() { return stdin; }); } // Don't quit on fatal error. process.on('uncaughtException', function(error) { // Do nothing if the user has a custom uncaught exception handler. var dialog, message, ref, stack; if (process.listeners('uncaughtException').length > 1) { return; }
var TestStream = function () { Stream.Readable.call(this); this.statusCode = 200; };
/**
 * Readable counter; _index/_ max state is presumably consumed by a _read
 * implementation defined elsewhere.
 * @constructor
 * @param {number} max - upper bound for the counter
 * @param {Object} [options] - forwarded to Readable
 */
function Counter(max, options) {
  Readable.call(this, options);
  this._max = max;  // stop after this many values
  this._index = 0;  // values produced so far
}
'use strict'; const common = require('../common'); const stream = require('stream'); // This is very similar to test-stream-pipe-cleanup-pause.js. const reader = new stream.Readable(); const writer1 = new stream.Writable(); const writer2 = new stream.Writable(); // 560000 is chosen here because it is larger than the (default) highWaterMark // and will cause `.write()` to return false // See: https://github.com/nodejs/node/issues/5820 const buffer = Buffer(560000); reader._read = function(n) {}; writer1._write = common.mustCall(function(chunk, encoding, cb) { this.emit('chunk-received'); cb(); }, 1); writer1.once('chunk-received', function() { setImmediate(function() { // This one should *not* get through to writer1 because writer2 is not // "done" processing. reader.push(buffer); }); }); // A "slow" consumer: writer2._write = common.mustCall(function(chunk, encoding, cb) {
/**
 * Readable test double; _done flags whether the source is exhausted
 * (presumably flipped by a _read implementation defined elsewhere).
 * @constructor
 * @param {Object} [opt] - forwarded to Readable
 */
function TestStream(opt) {
  Readable.call(this, opt);
  this._done = false;
}
// After 100 ms, record that this path ran and end the stream.
setTimeout(() => {
  ran = true
  a.push(null)
}, 100)
/**
 * Readable counting from 1 up to one million; counter state is presumably
 * consumed by a _read implementation defined elsewhere.
 * @constructor
 * @param {Object} [opt] - forwarded to Readable
 */
function Counter(opt) {
  Readable.call(this, opt);
  this._max = 1000000; // final value to produce
  this._index = 1;     // next value to produce
}
// Deliver pub/sub payloads into the stream.
listener = function(msgChannel, data) {
  // We shouldn't get messages after unsubscribe, but it's happened.
  if (!open) return;
  // Ignore traffic on channels we don't own.
  if (msgChannel !== self._prefixChannel(channels)) return;
  stream.push(data);
};
// Once the database is closed, signal end-of-stream on the result.
var endResult = function () {
  result.push(null)
}
db.close(endResult)
async uploadString(path, data) { const dataStream = new Readable(); dataStream.push(data); dataStream.push(null); return await this.uploadStream(path, dataStream); }
// Propagate stream failures to the result emitter.
var forwardError = function (err) {
  result.emit('error', err)
}
stream.on('error', forwardError)
'use strict';

// Example: feed an object-mode stream of number pairs into a Python script
// via ncpy.
var ncpy = require('../index')
var util = require('util')
var Readable = require('stream').Readable;
var Transform = require('stream').Transform;

// Source stream: five [a, b] pairs, then EOF.
var SomeStream = new Readable({ "objectMode": true })
SomeStream.push([1,2])
SomeStream.push([20,3])
SomeStream.push([3,40])
SomeStream.push([4,50])
SomeStream.push([55,66])
SomeStream.push(null)

// Sink transform that discards everything it receives.
function testTransform() {
  Transform.call(this, { objectMode: true })
}
util.inherits(testTransform, Transform)
testTransform.prototype._transform = function(data, encoding, done) {
  // console.log(data);
  return done()
}

ncpy.ffi
  // load the python script and initialize the python interpreter
  .require('py/multiplication.py', { path: './examples' })
  // this expects a stream (in { objectMode: true })
  .init(SomeStream)
  // Tell `ncpy` what function to execute.
// On the next tick, forward any buffered data from the stream to the writer.
process.nextTick(function() {
  const chunk = stream.read();
  if (chunk !== null) {
    writer.write(chunk);
  }
});