// Factory for typed error constructors.
// Given { type, message, ...extra }, returns a `createError(opts)` function
// that builds Error instances tagged with `type`, a derived `name`, and a
// message rendered from the `message` template.
// NOTE(review): depends on out-of-scope helpers assert/camelize/extend/template.
function TypedError(args) {
    assert(args, "args is required");
    assert(args.type, "args.type is required")
    assert(args.message, "args.message is required")

    var message = args.message
    // Derive a PascalCase name from the type unless one was supplied,
    // e.g. camelize(type) + "Error" with the first letter upper-cased.
    if (args.type && !args.name) {
        var errorName = camelize(args.type) + "Error"
        args.name = errorName[0].toUpperCase() + errorName.substr(1)
    }

    createError.type = args.type;
    createError._name = args.name;

    return createError;

    function createError(opts) {
        var result = new Error()
        // Pre-define `type` as enumerable/writable so the extend() below can
        // assign it and it shows up in enumeration. `result.type` is undefined
        // at this point -- presumably intentional (this call only installs the
        // property descriptor; the value is copied in by extend). TODO confirm.
        Object.defineProperty(result, "type", {
            value: result.type,
            enumerable: true,
            writable: true,
            configurable: true
        })
        // Per-call opts override the factory-level args.
        var options = extend({}, args, opts)

        extend(result, options)
        // Render the message template with the merged options.
        result.message = template(message, options)

        return result
    }
}
// Keep retrying read(n) until n bytes are buffered, then verify and continue.
(function attempt() {
  var chunk = r.read(n);
  if (!chunk) {
    // Not enough data buffered yet; try again on the next 'readable'.
    r.once('readable', attempt);
    return;
  }
  assert.strictEqual(chunk.length, n);
  assert(!r._readableState.flowing);
  then();
})();
// Final-state checks run at process exit.
process.on('exit', function (code) {
  assert.strictEqual(reads, 2);
  // we pushed up the high water mark
  assert.strictEqual(stream._readableState.highWaterMark, 8192);
  // length is 0 right now, because we pulled it all out.
  assert.strictEqual(stream._readableState.length, 0);
  assert(!code);
  assert.strictEqual(depth, 0);
  console.log('ok');
});
// Final-state checks run at process exit (public accessor variant).
process.on('exit', function (code) {
  assert.strictEqual(reads, 2);
  // we pushed up the high water mark
  assert.strictEqual(stream.readableHighWaterMark, 8192);
  // length is 0 right now, because we pulled it all out.
  assert.strictEqual(stream.readableLength, 0);
  assert(!code);
  assert.strictEqual(depth, 0);
  require('tap').pass();
});
// finishedPromise() must resolve only after the stream has emitted 'end'.
var _ref = _asyncToGenerator(function* () {
  var rs = fs.createReadStream(__filename);
  var done = common.mustCall();
  var sawEnd = false;
  rs.resume();
  rs.on('end', function () {
    sawEnd = true;
  });
  yield finishedPromise(rs);
  assert(sawEnd);
  done();
});
// Push the next slice of `data` (or null at EOF) either synchronously
// ("fast") or on a timer, advancing the shared cursor `pos` either way.
function push(fast) {
  assert(!pushedNull, 'push() after null push');
  var chunk = pos >= data.length
    ? null
    : data.slice(pos, Math.min(pos + n, data.length));
  pushedNull = chunk === null;
  // Shared delivery step for both the sync and async paths.
  function deliver() {
    pos += n;
    r.push(chunk);
    if (chunk === null) pushError();
  }
  if (fast) {
    deliver();
  } else {
    setTimeout(deliver, 1);
  }
}
// pipelinePromise() must resolve only after the writable has finished.
var _ref = _asyncToGenerator(function* () {
  var rStream = new Readable({ read: function read() {} });
  var wStream = new Writable({
    write: function write(data, enc, cb) {
      cb();
    }
  });
  rStream.push('data');
  rStream.push(null);
  var sawFinish = false;
  wStream.on('finish', function () {
    sawFinish = true;
  });
  yield pipelinePromise(rStream, wStream);
  assert(sawFinish);
});
// Custom _read: feed slices of `data`, pushing every third chunk
// synchronously and the rest on a timer, then push null + error at EOF.
r._read = function (n) {
  assert(!pushedNull, '_read after null push');
  // every third chunk is fast
  push(!(chunks % 3));
  function push(fast) {
    assert(!pushedNull, 'push() after null push');
    var chunk = pos >= data.length
      ? null
      : data.slice(pos, Math.min(pos + n, data.length));
    pushedNull = chunk === null;
    // Shared delivery step for both the sync and async paths.
    function deliver() {
      pos += n;
      r.push(chunk);
      if (chunk === null) pushError();
    }
    if (fast) {
      deliver();
    } else {
      setTimeout(deliver, 1);
    }
  }
};
// With decodeStrings:false the writable must receive the raw string.
var m = new MyWritable(function (gotBuffer, chunkType, chunkEnc) {
  assert(!gotBuffer);
  assert.equal(chunkType, 'string');
  assert.equal(chunkEnc, 'utf8');
  console.log('ok - un-decoded string is not decoded');
}, { decodeStrings: false });
process.on('exit', function () {
  // The patched _read must still be installed and must have run.
  assert.equal(r._read, _read);
  assert(_readCalled);
});
_pt.end(); assert.strictEqual(_pt.read(), 1); assert.strictEqual(_pt.read(), true); assert.strictEqual(_pt.read(), false); assert.strictEqual(_pt.read(), 0); assert.strictEqual(_pt.read(), 'foo'); assert.strictEqual(_pt.read(), ''); assert.deepStrictEqual(_pt.read(), { a: 'b' }); } { // Verify passthrough constructor behavior var _pt2 = PassThrough(); assert(_pt2 instanceof PassThrough); } { // Verify transform constructor behavior var _pt3 = Transform(); assert(_pt3 instanceof Transform); } { // Perform a simple transform var _pt4 = new Transform(); _pt4._transform = function (c, e, cb) { var ret = bufferShim.alloc(c.length, 'x'); _pt4.push(ret);
process.on('exit', function () {
  // _read must have been invoked at least once.
  assert(calledRead);
});
// Assert that `a` matches `b` via the local `match` helper.
const eql = (a, b) => {
  assert(match(a, b));
};
// Event handler: must be invoked with the vdom node as `this`.
vdom.events.test = function (n) {
  assert(this == vdom);
  c += n;
};
process.on('exit', function () {
  // The transform must have emitted the parsed object.
  assert(parsed.val === 42);
});
process.on('exit', function () {
  // _read must have been invoked at least once.
  assert(calledRead);
  console.log('ok');
});
process.on('exit', function () {
  assert(passed, 'Large buffer is not handled properly by Writable Stream');
});
return TestStream; }(stream.Transform); var s1 = new stream.PassThrough(); var s2 = new stream.PassThrough(); var s3 = new TestStream(); s1.pipe(s3); // Don't let s2 auto close which may close s3 s2.pipe(s3, { end: false }); // We must write a buffer larger than highWaterMark var big = bufferShim.alloc(s1.writableHighWaterMark + 1, 'x'); // Since big is larger than highWaterMark, it will be buffered internally. assert(!s1.write(big)); // 'tiny' is small enough to pass through internal buffer. assert(s2.write('tiny')); // Write some small data in next IO loop, which will never be written to s3 // Because 'drain' event is not emitted from s1 and s1 is still paused setImmediate(s1.write.bind(s1), 'later'); // Assert after two IO loops when all operations have been done. process.on('exit', function () { assert(passed, 'Large buffer is not handled properly by Writable Stream'); }); function indexOf(xs, x) { for (var i = 0, l = xs.length; i < l; i++) { if (xs[i] === x) return i; }
// finished() must fire exactly once, without error, after both
// 'finish' and 'end' have been observed.
finished(tr, common.mustCall(function (err) {
  assert(!err, 'no error');
  assert(finish);
  assert(ended);
}));
// finished() must fire exactly once with no error for the writable.
finished(ws, common.mustCall(function (err) {
  assert(!err, 'no error');
}));
// The stream closed before ending, so finished() must report an error.
finished(_rs4, common.mustCall(function (err) {
  assert(err, 'premature close error');
}));
process.on('exit', function () {
  // The serialized output must carry the original byte.
  assert(serialized[0] === 42);
});
// With decodeStrings:true the writable must receive a decoded Buffer.
var m = new MyWritable(function (gotBuffer, chunkType, chunkEnc) {
  assert(gotBuffer);
  assert.equal(chunkType, 'object');
  assert.equal(chunkEnc, 'buffer');
  console.log('ok - decoded string is decoded');
}, { decodeStrings: true });
/*<replacement>*/
var bufferShim = require('buffer-shims');
/*</replacement>*/
require('../common');
var assert = require('assert/');
var Transform = require('../../').Transform;

// Only the readable side is put into object mode.
var parser = new Transform({ readableObjectMode: true });

assert(parser._readableState.objectMode);
assert(!parser._writableState.objectMode);
// Object-mode readable side defaults to 16 (objects); the byte-mode
// writable side keeps the 16 KiB default.
assert(parser._readableState.highWaterMark === 16);
assert(parser._writableState.highWaterMark === 16 * 1024);

// Turn each incoming buffer into an object wrapping its first byte.
parser._transform = function (chunk, enc, callback) {
  callback(null, { val: chunk[0] });
};

var parsed;
parser.on('data', function (obj) {
  parsed = obj;
});

parser.end(bufferShim.from([42]));

process.on('exit', function () {
  assert(parsed.val === 42);
});
} done(); }; var s1 = new PassThrough(); var s2 = new PassThrough(); var s3 = new TestStream(); s1.pipe(s3); // Don't let s2 auto close which may close s3 s2.pipe(s3, { end: false }); // We must write a buffer larger than highWaterMark var big = bufferShim.alloc(s1._writableState.highWaterMark + 1, 'x'); // Since big is larger than highWaterMark, it will be buffered internally. assert(!s1.write(big)); // 'tiny' is small enough to pass through internal buffer. assert(s2.write('tiny')); // Write some small data in next IO loop, which will never be written to s3 // Because 'drain' event is not emitted from s1 and s1 is still paused setImmediate(s1.write.bind(s1), 'later'); // Assert after two IO loops when all operations have been done. process.on('exit', function () { assert(passed, 'Large buffer is not handled properly by Writable Stream'); }); function indexOf(xs, x) { for (var i = 0, l = xs.length; i < l; i++) { if (xs[i] === x) return i;
it('should clone', function () {
  var source = { foo: 'bar' };
  var copy = clone(source);
  // A clone must exist and be structurally equal to the source.
  assert(copy);
  deepEqual(copy, source);
});
// _read must run at most once; it immediately signals EOF.
stream._read = function () {
  assert(!calledRead);
  calledRead = true;
  this.push(null);
};
// Method must be called with an Obj instance as `this`
// (function expression kept so `this` stays dynamic).
Obj.prototype.fn = function () {
  assert(this instanceof Obj);
};
/*<replacement>*/ var bufferShim = require('safe-buffer').Buffer; /*</replacement>*/ require('../common'); var assert = require('assert/'); var Transform = require('../../').Transform; var parser = new Transform({ readableObjectMode: true }); assert(parser._readableState.objectMode); assert(!parser._writableState.objectMode); assert.strictEqual(parser.readableHighWaterMark, 16); assert.strictEqual(parser.writableHighWaterMark, 16 * 1024); assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark); assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark); parser._transform = function (chunk, enc, callback) { callback(null, { val: chunk[0] }); }; var parsed; parser.on('data', function (obj) { parsed = obj;
})], ['#footer', ['p', 'Written by: ', new Text(data.author), null, undefined, [null]]]] } var vdom = create(app({ author: 'Jake Rosoman', list: ['one', 'two', 'three'] })) assert.equal(vdom, new Node('div', {className: 'app'}, [ new Node('h1', {id: 'header'}, [new Text('Todos')], {click: console.log}), new Node('ul', {id: 'list'}, [ new Node('li', {className: 'item'}, [new Text('one')]), new Node('li', {className: 'item selected'}, [new Text('two')]), new Node('li', {className: 'item'}, [new Text('three')]) ]), new Node('div', {id: 'footer'}, [ new Node('p', {}, [new Text('Written by: '), new Text('Jake Rosoman')]) ]) ])) var c = 0 vdom.events.test = function(n){ assert(this == vdom) c += n } vdom.emit('test', 1) assert(c == 1)