Example #1
 server.use(function(req, res) {
   fs.createReadStream(indexPath).pipe(res);
 });
Example #2
File: sat.js Project: i4han/cubesat
const cp = (s, t) => fs.createReadStream(s).pipe(fs.createWriteStream(t))
Example #3
      .on('file', function(name, file) {
        filenames.push(file.name);
        remaining++;

        var fn = file.path.split(slach); // 'slach' (the path separator) is defined in the enclosing scope

        if (bucket.events.upload) {
          bucket.events.upload.run(ctx, {url: ctx.url, fileSize: file.size, fileName: ctx.url}, function(err) {
            if (err) return uploadedFile(err);
            bucket.uploadFile(curcafe, fn[fn.length - 1] || file.name || ctx.url, file.size, file.type, fs.createReadStream(file.path), uploadedFile);
          });
        } else {
          bucket.uploadFile(curcafe, fn[fn.length - 1] || file.name || ctx.url, file.size, file.type, fs.createReadStream(file.path), uploadedFile);
        }
      })
Example #4
 setTimeout(() => {
   fs.createReadStream('index.html').pipe(res);
 }, 5000);
 beforeEach(function (done) {
   fs.unlinkSync(shrinkWrapPath)
   fs.createReadStream(__dirname + '/fixture.npm-shrinkwrap.json')
   .pipe(fs.createWriteStream(__dirname + '/npm-shrinkwrap.json'))
   .on('finish', done)
 })
Example #6
app.router.get('/', function(){
	this.res.writeHead(200, {"content-type": "text/html"});
	fs.createReadStream(__dirname + "/public/index.html").pipe(this.res);
});
Example #7
function copyFile(f1,f2) {
	fs.createReadStream(f1).pipe(fs.createWriteStream(f2));
}
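The one-liner above never reports errors: if f1 does not exist or f2 cannot be written, the copy fails silently. A minimal sketch of the same copy with error propagation, using stream.pipeline from Node's standard library (Node 10+); the function and file names are illustrative.

var fs = require('fs');
var pipeline = require('stream').pipeline;

function copyFileSafe(src, dest, cb) {
  // pipeline() wires the pipe, forwards an error from either stream to the
  // callback, and destroys both streams on failure.
  pipeline(fs.createReadStream(src), fs.createWriteStream(dest), cb);
}

copyFileSafe('in.txt', 'out.txt', function (err) {
  if (err) return console.error('copy failed:', err);
  console.log('copy complete');
});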
Example #8
#!/usr/bin/env node
'use strict';

var fs = require('fs');

var concat = require('concat-stream');
var render = require('marked');
var TerminalRenderer = require('marked-terminal');

render.setOptions({
  renderer: new TerminalRenderer()
});

var file = process.argv[2];

var input;

if (!file || file === '-') {
  input = process.stdin;
} else {
  input = fs.createReadStream(file);
}

var write = concat(function (data) {
  process.stdout.write(render(data.toString()));
  process.exit();
});

input.pipe(write);
Example #9
#!/usr/bin/env node
var reverse = require('../')

var fs = require('fs')
  , PassThrough = require('stream').PassThrough
  , splitStream = new PassThrough()
  , values = ['one', 'two', 'three', 'four', 'five']

fs.createWriteStream('file.txt').end(values.join('\n'))

splitStream._transform = function (chunk, enc, next) {
  String(chunk).split('\n').forEach(function (piece) {
    splitStream.push(piece)
  })
  next()
}

var readFile = fs.createReadStream('file.txt')
readFile.on('end', function () {
  fs.unlink('file.txt', function () {}) // a callback is required in newer Node versions
})

readFile
  .pipe(splitStream)
  .pipe(reverse())
  .pipe(process.stdout)
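Overriding _transform on a PassThrough instance works, but the stream module exposes Transform for exactly this purpose. A sketch of the same line-splitting stage written as a plain Transform (assumed equivalent in behaviour to the patched PassThrough above):

var Transform = require('stream').Transform;

// Split each incoming chunk on newlines and re-emit the pieces
// as separate chunks, like the patched PassThrough above.
var splitStream = new Transform({
  transform: function (chunk, enc, next) {
    String(chunk).split('\n').forEach(function (piece) {
      this.push(piece);
    }, this);
    next();
  }
});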
Example #10
s.listen(s.port, function () {
  var counter = 0;

  var check = function () {
    counter = counter - 1
    if (counter === 0) {
      console.log('All tests passed.')
      setTimeout(function () {
        process.exit();
      }, 500)
    }
  }

  // Test piping to a request object
  s.once('/push', server.createPostValidator("mydata"));

  var mydata = new stream.Stream();
  mydata.readable = true

  counter++
  var r1 = request.put({url:'http://localhost:3453/push'}, function () {
    check();
  })
  mydata.pipe(r1)

  mydata.emit('data', 'mydata');
  mydata.emit('end');

  // Test piping to a request object with a json body
  s.once('/push-json', server.createPostValidator("{\"foo\":\"bar\"}", "application/json"));

  var mybodydata = new stream.Stream();
  mybodydata.readable = true

  counter++
  var r2 = request.put({url:'http://localhost:3453/push-json',json:true}, function () {
    check();
  })
  mybodydata.pipe(r2)

  mybodydata.emit('data', JSON.stringify({foo:"bar"}));
  mybodydata.emit('end');

  // Test piping from a request object.
  s.once('/pull', server.createGetResponse("mypulldata"));

  var mypulldata = new stream.Stream();
  mypulldata.writable = true

  counter++
  request({url:'http://localhost:3453/pull'}).pipe(mypulldata)

  var d = '';

  mypulldata.write = function (chunk) {
    d += chunk;
  }
  mypulldata.end = function () {
    assert.equal(d, 'mypulldata');
    check();
  };


  s.on('/cat', function (req, resp) {
    if (req.method === "GET") {
      resp.writeHead(200, {'content-type':'text/plain-test', 'content-length':4});
      resp.end('asdf')
    } else if (req.method === "PUT") {
      assert.equal(req.headers['content-type'], 'text/plain-test');
      assert.equal(req.headers['content-length'], 4)
      var validate = '';

      req.on('data', function (chunk) {validate += chunk})
      req.on('end', function () {
        resp.writeHead(201);
        resp.end();
        assert.equal(validate, 'asdf');
        check();
      })
    }
  })
  s.on('/pushjs', function (req, resp) {
    if (req.method === "PUT") {
      assert.equal(req.headers['content-type'], 'application/javascript');
      check();
    }
  })
  s.on('/catresp', function (req, resp) {
    request.get('http://localhost:3453/cat').pipe(resp)
  })
  s.on('/doodle', function (req, resp) {
    if (req.headers['x-oneline-proxy']) {
      resp.setHeader('x-oneline-proxy', 'yup')
    }
    resp.writeHead('200', {'content-type':'image/jpeg'})
    fs.createReadStream(path.join(__dirname, 'googledoodle.jpg')).pipe(resp)
  })
  s.on('/onelineproxy', function (req, resp) {
    var x = request('http://localhost:3453/doodle')
    req.pipe(x)
    x.pipe(resp)
  })

  counter++
  fs.createReadStream(__filename).pipe(request.put('http://localhost:3453/pushjs'))

  counter++
  request.get('http://localhost:3453/cat').pipe(request.put('http://localhost:3453/cat'))

  counter++
  request.get('http://localhost:3453/catresp', function (e, resp, body) {
    assert.equal(resp.headers['content-type'], 'text/plain-test');
    assert.equal(resp.headers['content-length'], 4)
    check();
  })

  var doodleWrite = fs.createWriteStream(path.join(__dirname, 'test.jpg'))

  counter++
  request.get('http://localhost:3453/doodle').pipe(doodleWrite)

  doodleWrite.on('close', function () {
    assert.deepEqual(fs.readFileSync(path.join(__dirname, 'googledoodle.jpg')), fs.readFileSync(path.join(__dirname, 'test.jpg')))
    check()
  })

  process.on('exit', function () {
    fs.unlinkSync(path.join(__dirname, 'test.jpg'))
  })

  counter++
  request.get({uri:'http://localhost:3453/onelineproxy', headers:{'x-oneline-proxy':'nope'}}, function (err, resp, body) {
    assert.equal(resp.headers['x-oneline-proxy'], 'yup')
    check()
  })

  s.on('/afterresponse', function (req, resp) {
    resp.write('d')
    resp.end()
  })

  counter++
  var afterresp = request.post('http://localhost:3453/afterresponse').on('response', function () {
    var v = new ValidationStream('d')
    afterresp.pipe(v)
    v.on('end', check)
  })

  s.on('/forward1', function (req, resp) {
    resp.writeHead(302, {location:'/forward2'})
    resp.end()
  })
  s.on('/forward2', function (req, resp) {
    resp.writeHead('200', {'content-type':'image/png'})
    resp.write('d')
    resp.end()
  })

  counter++
  var validateForward = new ValidationStream('d')
  validateForward.on('end', check)
  request.get('http://localhost:3453/forward1').pipe(validateForward)

  // Test pipe options
  s.once('/opts', server.createGetResponse('opts response'));

  var optsStream = new stream.Stream();
  optsStream.writable = true

  var optsData = '';
  optsStream.write = function (buf) {
    optsData += buf;
    if (optsData === 'opts response') {
      setTimeout(check, 10);
    }
  }

  optsStream.end = function () {
    assert.fail('end called')
  };

  counter++
  request({url:'http://localhost:3453/opts'}).pipe(optsStream, { end : false })

  // test request.pipefilter is called correctly
  counter++
  s.on('/pipefilter', function(req, resp) {
    resp.end('d')
  })
  var validatePipeFilter = new ValidationStream('d')

  var r3 = request.get('http://localhost:3453/pipefilter')
  r3.pipe(validatePipeFilter)
  r3.pipefilter = function(resp, dest) {
    assert.equal(resp, r3.response)
    assert.equal(dest, validatePipeFilter)
    check()
  }
})
Example #11
// Write an array of payment objects (includes filer_ID and amount)
// ======================

var fs = require('fs'),
	_ = require('lodash'),
	path = require('path'),
	through = require('through'),
	csv = require('csv-parser');

var inputPath = path.normalize('../../data/Form_460_-_Schedule_E_-_Payments_Made.csv');
var outputPath = path.normalize('../../data/Schedule_E_-_Payee_details.json');
var allPayees = [];

fs.createReadStream(inputPath)
  .pipe(csv())
  .pipe(through(function(cont) {

    var payment = {
      filer_ID: cont.Filer_ID,      // ID of the filer that reported the payment
      lastname: cont.Payee_NamL,
      name: cont.Payee_NamF + ' ' + cont.Payee_NamL,
      amount: cont.Amount,          // amount of the payment
      title: cont.Payee_NamT,
      suffix: cont.Payee_NamS,
      address1: cont.Payee_Adr1,
      address2: cont.Payee_Adr2,
      city: cont.Payee_City,
      state: cont.Payee_State,
      zip: cont.Payee_Zip4,
    };
    this.queue(payment);
  }));
Example #12
var fs = require('fs');
var filePath = process.argv[2];

fs.createReadStream(filePath).pipe(process.stdout);
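If the path given on the command line does not exist, the read stream emits 'error' and the process dies with an unhandled exception. A minimal variant with the error handled (the message text is illustrative):

var fs = require('fs');
var filePath = process.argv[2];

fs.createReadStream(filePath)
  .on('error', function (err) {
    // e.g. ENOENT when the file is missing
    console.error('Could not read ' + filePath + ': ' + err.message);
    process.exit(1);
  })
  .pipe(process.stdout);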
Example #13
 childProcess.execFile(opts.phantomPath, childArgs, function (er, stdout, stderr) {
   //if (stdout) console.log(stdout)
   //if (stderr) console.error(stderr)
   if (er) return outputStream.emit("error", er)
   fs.createReadStream(tmpPdfPath).pipe(outputStream)
 })
Example #14
var tar = require("../tar.js")
    , fs = require("fs")

fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
    .pipe(tar.Parse())
    .on("extendedHeader", function (e) {
        console.error("extended pax header", e.props)
        e.on("end", function () {
            console.error("extended pax fields:", e.fields)
        })
    })
    .on("ignoredEntry", function (e) {
        console.error("ignoredEntry?!?", e.props)
    })
    .on("longLinkpath", function (e) {
        console.error("longLinkpath entry", e.props)
        e.on("end", function () {
            console.error("value=%j", e.body.toString())
        })
    })
    .on("longPath", function (e) {
        console.error("longPath entry", e.props)
        e.on("end", function () {
            console.error("value=%j", e.body.toString())
        })
    })
    .on("entry", function (e) {
        console.error("entry", e.props)
        e.on("data", function (c) {
            console.error("  >>>" + c.toString().replace(/\n/g, "\\n"))
        })
    })
Example #15
app.get('/', function(req, res){
  fs.createReadStream('test/node/fixtures/user.json').pipe(res);
});
exports.createWebinosJS = function(node_moduleDir, apiModules) {
    var wrt_Dir, fileList, data, i, j, fileName, webroot_Dir, stat, android_Dir, webinosJS, wrtJs, staticPath;
    var os = require('os');
    webroot_Dir = path.join(node_moduleDir, "../web_root");
    wrt_Dir = path.join(node_moduleDir, "../wrt"); // To read webinos.js and webinos.session.js
    android_Dir = path.join(node_moduleDir, "../android");
    fs.writeFileSync(path.join(webroot_Dir, "webinos.js"),""); // Overwrite/create file
    webinosJS = fs.createWriteStream(path.join(webroot_Dir, "webinos.js"), { flags:"a", encoding:"utf8"});

    webinosJS.write("(function(exports){\n\n");

    // webinos.session.js has to be written first, then webinos.js, otherwise it fails
    
    var mandatoryWebinosJS=[
        path.join(node_moduleDir,"webinos-jsonrpc2","lib","registry.js"),
        path.join(node_moduleDir,"webinos-jsonrpc2","lib","rpc.js"),
        path.join(node_moduleDir,"webinos-utilities","lib","messagehandler.js"),
        path.join(wrt_Dir,"webinos.session.js"),
        path.join(wrt_Dir,"webinos.js"),
        path.join(node_moduleDir,"webinos-utilities","wrt","webinos.service.js"),
        path.join(node_moduleDir,"webinos-utilities","wrt","webinos.servicedisco.js")
    ];

    //If webinos-dashboard exists, load the wrt file
    var dashboard = null;
    try {dashboard = require.resolve("webinos-dashboard");}catch (e){}
    if (dashboard != null){
        mandatoryWebinosJS.push(path.join(node_moduleDir,"webinos-dashboard","wrt","webinos.dashboard.js"));
    }

    mandatoryWebinosJS.forEach(function(name){
        data = fs.readFileSync(name);
        webinosJS.write(data.toString());
    });

    // Gather folders starting with webinos-api
    for (i = 0; i < apiModules.length; i = i + 1) {
        fileName = fs.readdirSync(path.join(apiModules[i].path, "wrt"));
        for (j=0; j < fileName.length; j = j + 1) {
            stat = fs.statSync(path.join(apiModules[i].path, "wrt", fileName[j]));
            if (!stat.isFile()) continue;

            try {
                data = fs.readFileSync(path.join(apiModules[i].path, "wrt", fileName[j]));
                webinosJS.write(data.toString());
            } catch(err) {
                logger.log("Webinos module without client side code. " ,"Using Web RunTime you will not be able to access module "+ apiModules[i].name);
            }
        }
    }

    webinosJS.write("\n})(window);\n");
    
    if(os.platform() === 'android')
    {
        var androidInterfaces = require("../platform_interfaces.json").android;
        var wrtDir = androidInterfaces.wrt;
        if (!existsSync (wrtDir)) 
            fs.mkdirSync (wrtDir);
        staticPath = androidInterfaces.static;
        if (!existsSync (staticPath)) 
            fs.mkdirSync (staticPath);
      
        fs.writeFileSync(path.join(staticPath, "webinos.js"),""); 
        wrtJs = fs.createWriteStream(path.join(staticPath, "webinos.js"), { flags:"a", encoding:"utf8"});
        var webinossocketJs = path.join(android_Dir, "wrt", "webinossocket.js");
        data = fs.readFileSync(webinossocketJs);
        wrtJs.write(data.toString());
        fs.createReadStream(path.join(webroot_Dir, "webinos.js")).pipe(wrtJs);
    } 
};
Example #17
var fs = require('fs')
var net = require('net')


var EC2 = "ec2-174-129-179-178.compute-1.amazonaws.com"
  , PORT = 5001
  , FILE = 'thing.c'


var socket = net.connect(
  {
    port: PORT
  , host: EC2
  }
, connectcb )

var fstream =  fs.createReadStream(FILE)

fstream.on('end', function () {
  console.log('transfer complete')
  socket.end()
})


socket.on('end', function () {
  console.log('client disconnecting')
})

function connectcb() {
  console.log('client connected - transferring file')
  fstream.pipe(socket)
}
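Only the sending side is shown above. For context, a hypothetical receiver on the EC2 host could look like the sketch below; the port matches the snippet, but the output file name is an assumption.

// Hypothetical receiving end: accept a connection and stream the
// incoming bytes straight into a file.
var net = require('net');
var fs = require('fs');

var PORT = 5001;

net.createServer(function (socket) {
  socket.pipe(fs.createWriteStream('received_thing.c'));
  socket.on('end', function () {
    console.log('transfer received');
  });
}).listen(PORT);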
Example #18
 watcher.on('change', function(path) {
   fs.createReadStream(path).on('data', checkBuffer);
 });
var args = require('minimist')(process.argv);
var AWS = require('aws-sdk');
var fs = require('fs');
var awsS3Client = new AWS.S3();
var s3Stream = require('s3-upload-stream')(awsS3Client);


console.log("Arguments: ");
console.dir(args);
console.log("Uplading file: " + args.file);

AWS.config.update({region: 'us-west-2'});

var read = fs.createReadStream(args.file);
var upload = s3Stream.upload({
    Bucket: args.s3Bucket,
    Key: args.s3Key
});

upload.maxPartSize(20 * 1024 * 1024); // 20 MB
upload.concurrentParts(10);

upload.on('error', function (error) {
    console.log('error');
    console.error(error);
});

var megabyte = 1024 * 1024;

upload.on('part', function (details) {
    var receivedSize = (details.receivedSize / megabyte);
    console.log('Received ' + receivedSize + ' MB so far');
});

read.pipe(upload);
Example #20
var test = require('tap').test
var fs = require('fs')
var Questions = require('../questions.js')
var f = __dirname + '/fixtures'
var expectQm = fs.readFileSync(f + '/questions.txt', 'utf8')
    .replace(/\n+/g, '\n')
var expectQ = fs.readFileSync(f + '/questions-markless.txt', 'utf8')
    .replace(/\n+/g, '\n')

var log = fs.createReadStream(f + '/log.txt')
var qm = Questions()
var q = Questions({mark:false})

log.pipe(qm)
log.pipe(q)

var bufQm = []
var bufQ = []
q.on('data', bufQ.push.bind(bufQ))
qm.on('data', bufQm.push.bind(bufQm))


test('basic', function (t) {
  t.plan(2)

  q.on('end', function () {
    console.error('q')
    t.equal(bufQ.join('\n').replace(/\n+/g, '\n'), expectQ, 'markless')
  })

  qm.on('end', function () {
    console.error('qm')
    t.equal(bufQm.join('\n').replace(/\n+/g, '\n'), expectQm, 'marked')
  })
})

var fs = require('fs');
var stream  = fs.createReadStream('file.txt');



//as event-driven
stream.on('data', function(data){
  var chunk = data.toString();
  process.stdout.write(chunk);
})

/*
stream.on('end', function(){
  console.log();
})
*/

stream.on('error', function(error){
  console.error(error.message);
})
Example #22
app.get('/', function(req, res) {
  var rs = fs.createReadStream(__dirname + '/streamchat.html');
  rs.pipe(res); // sys.pump() is long deprecated; pipe() is the modern equivalent
});
Example #23
app.use('/tidepoolplatform.js', function(req, res) {
  res.setHeader('Content-Type', 'text/javascript');

  var stream = fs.createReadStream(__dirname + '/app/tidepoolplatform.js');
  stream.pipe(res);
});
 beforeEach(function (done) {
   len = Buffer.byteLength(fileText);
   stream = fs.createReadStream(localFileName);
   done();
 });
                    name.indexOf('Temporary Identifier') === -1 &&
                    name.indexOf('Identifier Course') === -1 &&
                    name.indexOf('Course-King') === -1 &&
                    name.indexOf('Identifier - Classics Visitor') === -1 &&
                    name.indexOf('Short Course ID Education') === -1 &&
                    name.indexOf('ECDL course identifier') === -1 &&
                    name.indexOf('Identifier - Education Visitor') === -1 &&
                    name.indexOf('PWF. Identifier') === -1 &&
                    name.indexOf('Identifier - Physics Visitor') === -1 &&
                    user[1] && user[2] && user[3])
        })

        // Return just the displayName, email & uid
        .map(function(user) {
            return [user[1], user[2], user[3] + '@cam.ac.uk'];
        });

    csv.stringify(properUsers, function(err, output) {
        if (err) {
            console.error(err);
            process.exit(1);
        }

        fs.writeFileSync('clean_users.csv', output);
        console.log('Generated clean_users.csv with %d users in it', properUsers.length);
        process.exit(0);
    });
});

fs.createReadStream(process.argv[2]).pipe(parser);
exports.uploads = function(req, res) {
  res.writeHead(200, {'Content-Type': 'image/jpeg'} );
  var path = './uploads/' + req.params.name;
  var fileStream = fs.createReadStream(path);
  fileStream.pipe(res);
};
Example #27
  var handler = function(request, response) {
    if(contentType) response.setHeader('Content-Type', contentType)

    response.writeHead(200)
    fs.createReadStream(filePath).pipe(response)
  }
Example #28
var args = require('minimist')(process.argv.slice(2)),
	database = args.database || 'big-mongo-test',
	collection = args.collection || 'bigdata',
	filename = args.filename || 'test.json',
	fs = require('fs'),
	JSONstream = require('JSONstream'),
	MongoClient = require('mongodb').MongoClient,
	path = require('path');

var filepath = path.join(__dirname, filename);

var stream = fs.createReadStream(filepath, {encoding: 'UTF-8'});

var parser = JSONstream.parse([/./]);

var list = [];
var i = 0;
var g = 0;
var url = 'mongodb://127.0.0.1:27017/' + database;
if (args.help) {
	console.log("command line options");
	console.log("");
	console.log("--database: \t the mongodb database you'd like to use for the import process.");
	console.log("\t\t defaults to `big-mongo-test`");
	console.log("");
	console.log("--collection: \t the mongodb collection you'd like to use for the import process.");
	console.log("\t\t defaults to `big-data`");
	console.log("");
	console.log("--filename: \t the file name of the JSON document that was generated. Did you remember it?");
	console.log("\t\t defaults to `test.json`");
	console.log("");
Example #30
var app = http.createServer(function(req, res) {
	res.writeHead(200, {'Content-Type': 'text/html'});
	fs.createReadStream('index.html').pipe(res)
});
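If index.html is missing, the read stream above emits 'error' after the 200 header has already been written and the process crashes. A sketch of the same server that only sends the 200 once the file has actually been opened (the listen port is an assumption; the original does not show one):

var http = require('http');
var fs = require('fs');

var app = http.createServer(function(req, res) {
	var stream = fs.createReadStream('index.html');

	stream.on('error', function(err) {
		// For a missing file this fires before any data is piped,
		// so no headers have been sent yet.
		res.writeHead(500, {'Content-Type': 'text/plain'});
		res.end('Could not read index.html\n');
	});

	stream.once('open', function() {
		res.writeHead(200, {'Content-Type': 'text/html'});
		stream.pipe(res);
	});
});

app.listen(8080);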