var common = require('../common'); // assumed path: the Node test helper that defines fixturesDir
var assert = require('assert');

// TODO Improve this test. test_ca.pem is too small. A proper test would
// create a large utf8 (with multibyte chars) file and stream it in,
// performing sanity checks throughout.

var path = require('path');
var fs = require('fs');
var fn = path.join(common.fixturesDir, 'elipses.txt');
var rangeFile = path.join(common.fixturesDir, 'x.txt');

var callbacks = { open: 0, end: 0, close: 0, destroy: 0 };

var paused = false;

var file = fs.ReadStream(fn);

file.on('open', function(fd) {
  file.length = 0;
  callbacks.open++;
  assert.equal('number', typeof fd);
  assert.ok(file.readable);

  // GH-535: pause/resume before any data arrives must not break the stream.
  file.pause();
  file.resume();
  file.pause();
  file.resume();
});

file.on('data', function(data) {
  // The original snippet is truncated here; a minimal body (assumed) that
  // keeps the accumulated length consistent with the 'open' handler above:
  file.length += data.length;
});
var express = require('express');
var fs = require('fs');
var app = express();
var resourceful = require('resourceful');
var myLocArr = [];

var readStream = fs.ReadStream(__dirname + "/transport.csv");

var TrvsLocation = resourceful.define('trvsLocation', function () {
  this.use('couchdb', { uri: 'http://127.0.0.1:5984/locations' });
  this.string('_id');
  this.string('town');
  this.number('population');
  this.string('provinceCode');
  this.string('townCode');
  this.string('country');
});

// Caveat: a 'data' chunk can end mid-line, so rows spanning a chunk boundary
// will be split; a line-buffering layer (e.g. readline) would be safer.
readStream.on('data', function(data) {
  var csvLoc = data.toString();
  var lines = csvLoc.split('\n');
  parseLines(lines);
});

// Split each CSV line into its fields and collect the rows.
// ('var' added: the original assigned to an implicit global; the original
// snippet is also truncated here, so the loop is closed minimally.)
var parseLines = function (mArr) {
  for (var i = 0; i < mArr.length; i++) {
    var mLine = mArr[i].split(",");
    myLocArr.push(mLine);
  }
};
var fs = require('fs'),
    readline = require('readline'),
    stream = require('stream');

var ASIAN_COUNTRIES = ["Afghanistan", "Bahrain", "Bangladesh", "Bhutan", "Myanmar",
  "Cambodia", "China", "India", "Indonesia", "Iraq", "Israel", "Japan", "Jordan",
  "Kazakhstan", "Lebanon", "Malaysia", "Maldives", "Mongolia", "Nepal", "Oman",
  "Pakistan", "Philippines", "Qatar", "Saudi Arabia", "Singapore", "Sri Lanka",
  "Syrian Arab Republic", "Tajikistan", "Thailand", "Timor-Leste", "Turkmenistan",
  "United Arab Emirates", "Uzbekistan", "Vietnam", "Yemen"];

var instream = fs.ReadStream('Indicators.csv');
var outstream1 = fs.WriteStream('output1.json');
outstream1.readable = true;
outstream1.writable = true;
var outstream2 = fs.WriteStream('output2.json');
outstream2.readable = true;
outstream2.writable = true;

var headers = [];
var count1 = 0, count2 = 0;
var countryIndex, indicatorCodeIndex;

var rl = readline.createInterface({
  input: instream,
  terminal: false
});

rl.on('line', function(line) {
  // First line is the CSV header row: record the column positions and
  // open both JSON output arrays.
  if (count1 == 0) {
    headers = line.split(",");
    countryIndex = headers.indexOf("CountryName");
    indicatorCodeIndex = headers.indexOf("IndicatorCode");
    headers.push("Color");
    outstream1.write("[");
    outstream2.write("[");
  }
  count1++; // assumed: the snippet is truncated here, but the counter must advance past the header row
});
var filename = process.argv[2];
var crypto = require('crypto');
var fs = require('fs');

var shasum = crypto.createHash('sha1');

var s = fs.ReadStream(filename);
s.on('data', function(d) {
  shasum.update(d);
});

s.on('end', function() {
  var d = shasum.digest('hex');
  console.log(d + ' ' + filename);
});
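// A shorter sketch of the same SHA-1 computation, relying on the fact that
// crypto.Hash is itself a stream: pipe the file into it and read the digest
// when the writable side finishes. Only documented Node core APIs are used;
// the output format matches the snippet above.
var crypto = require('crypto');
var fs = require('fs');

var file = process.argv[2];
var hash = crypto.createHash('sha1').setEncoding('hex');

fs.ReadStream(file).pipe(hash).on('finish', function() {
  console.log(hash.read() + ' ' + file);
});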
// Fragment (compiled from CoffeeScript) of an S3 PUT: stat the file, stream
// it once to compute the Content-MD5 header, then stream it again as the
// request body after the server's 100-continue response.
return fs.stat(filePath, __bind(function(err, stats) {
  var contentLength, md5Hash, rs;
  if (err != null) {
    return callback(err);
  }
  contentLength = stats.size;
  md5Hash = crypto.createHash('md5');
  rs = fs.ReadStream(filePath);
  rs.on('data', function(d) {
    return md5Hash.update(d);
  });
  return rs.on('end', __bind(function() {
    var date, httpOptions, k, md5, req, timeout, v;
    md5 = md5Hash.digest('base64');
    date = new Date();
    httpOptions = {
      host: "s3.amazonaws.com",
      path: "/" + this.bucket + "/" + resource,
      headers: {
        "Authorization": "AWS " + this.awsKey + ":" + (this.sign(resource, md5, mimeType, date, amzHeaders)),
        "Date": date.toUTCString(),
        "Content-Length": contentLength,
        "Content-Type": mimeType,
        "Content-MD5": md5,
        "Expect": "100-continue"
      },
      method: "PUT"
    };
    for (k in headers) {
      httpOptions.headers[k] = headers[k];
    }
    timeout = null;
    req = (this.secure ? https : http).request(httpOptions, __bind(function(res) {
      var headers, responseBody;
      if (res.statusCode === 200) {
        clearTimeout(timeout);
        headers = JSON.stringify(res.headers);
        return callback(null, { headers: headers, code: res.statusCode });
      }
      // Non-200: collect the XML error body and hand the parsed result back.
      responseBody = "";
      res.setEncoding("utf8");
      res.on("data", function(chunk) {
        return responseBody += chunk;
      });
      return res.on("end", function() {
        var parser;
        parser = new xml2js.Parser();
        return parser.parseString(responseBody, function(err, result) {
          if (err != null) {
            return callback(err);
          }
          return callback(result);
        });
      });
    }, this));
    timeout = delayTimeout(this.timeout, __bind(function() {
      req.abort();
      return callback({ message: "Timed out after " + this.timeout + "ms" });
    }, this));
    return req.on("continue", function() {
      var rs2;
      rs2 = fs.ReadStream(filePath);
      rs2.on('error', callback);
      return rs2.pipe(req);
    });
  }, this));
}, this));
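// Design note on the snippet above: the file is deliberately read twice. S3
// requires Content-MD5 and Content-Length before the body is sent, so the
// first pass computes the checksum, and the "Expect: 100-continue" header
// delays the second pass (rs2.pipe(req)) until S3 has accepted the request
// headers, avoiding a wasted upload on an auth or header error.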
function helpCmd () {
  // Stream the usage text straight to stdout.
  fs.ReadStream(path.join(__dirname, 'USAGE.txt')).pipe(process.stdout)
}
var fs = require('fs');

var readStream = fs.ReadStream('names.txt');
var writeStream = fs.WriteStream('out.txt');

readStream.setEncoding('utf8');

readStream.on('data', function(chunk) {
  writeStream.write(chunk);
});

readStream.on('close', function() {
  writeStream.end();
});
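// The same copy sketched with pipe(): one line, backpressure is handled for
// you instead of write() buffering unconditionally, and pipe() ends the
// destination by default. Filenames are those from the snippet above.
var fs = require('fs');
fs.ReadStream('names.txt').pipe(fs.WriteStream('out.txt'));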
// Fragment: tail of a winston logger configuration, then the local file
// cache update.
  transports: [
    options.p || options.publish
      ? new (winston.transports.Console)({ handleExceptions: true, filename: settings.errorLog })
      : new (winston.transports.File)({ handleExceptions: true, filename: settings.errorLog })
  ]
})

// These have to be included after we parse the settings file
var utils = require('./libs/utils');
var server = require('./libs/server');
var client = require('./libs/client');

console.log("Updating local files cache ...");

// Settings file checksum
utils.updateFile(fs.ReadStream(path.join(settings.root, 'settings.json')),
                 path.join(settings.root, 'settings.json'));

var counter = 0;
settings.contentDirs.forEach(function(dir){
  if (settings.watchFiles) {
    // we want to watch files, so there is no need to use caching for posts
    settings.useCaching = false;
    var files = fs.readdirSync(dir);
    var containsMarkdown = false;
    for (var i = 0; i < files.length; i++) {
      if (files[i].substr(-2).toLowerCase() === 'md') {
        containsMarkdown = true;
        break; // assumed: the snippet is truncated here; one match suffices
      }
    }
  }
});
// Crawl the content tree, re-cache each file (markdown posts via updatePost,
// everything else via updateFile), and run the requested action once the
// last pending update completes.
utils.crawl(path.join(settings.root, dir), function(filepath){
  counter++;
  utils[filepath.substr(-2).toLowerCase() === 'md' ? 'updatePost' : 'updateFile'](
      fs.ReadStream(filepath), filepath, {}, function(){
    if(--counter){ return; }
    if(options.p || options.publish) {
      client.publish();
    } else if(options.s || options.serve) {
      require('http').createServer(server).listen(settings.port);
      console.log("Serving this blog on " + settings.server + ":" + settings.port);
    } else {
      console.log("\nUsage:\n");
      console.log("blog.js --serve (-s) to start the server.");
      console.log("blog.js --publish (-p) to publish your latest changes.");
      console.log("Edit settings.json to change settings.\n");
    }
  });
});
  .describe('i', 'Filename (with path) for file to upload')
  .options('b', { alias: 'bucket' })
  .describe('b', 'AWS Bucket Name')
  .options('s', { alias: 'strip' })
  .describe('s', 'Strip out first n segments of the path when creating output path on S3')
  .options('p', { alias: 'prepend' })
  .describe('p', 'Prepend absolute path to S3 output path. Use with --strip to remove unneeded initial path segments.')
  .demand(['i', 'b'])
  .argv;

var filePathToBackup = argv.i,
    awsAccessKey = process.env.AWS_ACCESS_KEY_ID,
    awsSecretKey = process.env.AWS_SECRET_ACCESS_KEY,
    awsBucket = argv.b,
    numberOfPathPartsToStrip = argv.s,
    prependText = argv.p;

if (!awsAccessKey || !awsSecretKey) {
  console.log('Missing AWS credentials. You need two environment variables defined: AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY');
  process.exit(1);
}

toS3.upload(filePathToBackup, numberOfPathPartsToStrip, prependText,
    fs.ReadStream(filePathToBackup), awsBucket, awsAccessKey, awsSecretKey);
si.ready(function(err, si){
  console.log('ready ' + err + si)

  var read = fs.ReadStream(npmfile);
  read.setEncoding('ascii');

  var linestream = byline.createStream()
  var count = 0
  var done = 0
  var modent = si.make('mod')

  // Each line of the npm registry dump is one module record.
  linestream.on('data', function(line){
    try {
      var data = JSON.parse( cutcomma(line) )
      count++
      modent.load$({ name: data.id }, function(err, mod){
        if(err) { return console.error(err) }
        mod = mod || modent.make$()
        mod.name = data.id
        var latest = npmentry.latest(data)
        if( latest && latest.repository && 'git' == latest.repository.type ) {
          mod.lastgit = mod.lastgit || 0
          mod.giturl = latest.repository.url
        } else {
          mod.lastgit = Number.MAX_VALUE
        }
        mod.lastnpm = mod.lastnpm || 0
        mod.save$(function(err, mod){
          done++
          if( 0 == count % 100 ) { process.stdout.write('*') }
        })
      })
    } catch(e){
      console.error(e)
    }
  })

  linestream.on('end', function(){
    console.log('\nmongo-size:' + count)
    console.log('count:' + count + ' done:' + done)

    // Poll until every pending save$ has completed before closing the db.
    function waitfordb() {
      if( done < count ) {
        process.stdout.write(' ' + done)
        setTimeout(waitfordb, 333)
      } else {
        si.close()
        if( argv.i ) { insertall(depsfile, argv.h) }
      }
    }
    waitfordb()
  })

  read.pipe(linestream)
})
// Build the link graph from deps.json. Parameter renamed from 'pathfile' to
// 'filepath': the original declared 'pathfile' but referenced 'filepath'
// throughout, leaving it unbound.
function links( filepath ) {
  console.log('STAGE: links ' + filepath)

  var depsfile = path.dirname(filepath) + '/deps.json'
  var linksfile = path.dirname(filepath) + '/links.json'

  var read = fs.ReadStream(depsfile);
  read.setEncoding('ascii');

  //var jsr = js.parse([true])
  var linestream = byline.createStream()

  var index = {}
  var links = {
    "py/object": "__main__.web",
    dangling_pages: {},
    in_links: {},
    number_out_links: {},
  }

  var count = 0
  linestream.on('data', function(line){
    // Skip the JSON array punctuation lines; each remaining line is a record.
    if( ',' == line || '[' == line || ']' == line ) return;
    try {
      var data = JSON.parse(cutcomma(line))
      data.i = '' + data.i
      index[data.m] = data
      links.dangling_pages[data.i] = true
      count++
      if( 0 == count % 100 ) { process.stdout.write('+') }
    } catch( e ) {
      console.log(e + ' bad line:<' + line + '>')
    }
  })

  linestream.on('end', function(){
    console.log('end')

    var count = 0
    for( var n in index ) {
      count++
      var m = index[n]
      links.in_links[m.i] = []
    }

    for( var n in index ) {
      var m = index[n]
      //console.log(n+' -> '+m.d)
      for( var dI = 0; dI < m.d.length; dI++ ) {
        var depname = m.d[dI]
        if( _.isUndefined(index[depname]) ) { continue }

        var dep = index[depname].i
        links.in_links[dep].push( m.i )

        if( _.isUndefined(links.number_out_links[m.i]) ) {
          links.number_out_links[m.i] = 1
          delete links.dangling_pages[m.i]
        } else {
          links.number_out_links[m.i]++
        }
      }
    }

    links.size = count

    fs.writeFileSync(linksfile, JSON.stringify(links))
    console.log('links-size:' + count)

    if( argv.r ) { rank(depsfile) }
  })

  read.pipe(linestream)
}
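// Note on the structure above: the "py/object": "__main__.web" key and the
// dangling_pages / in_links / number_out_links fields match a jsonpickle
// serialization, suggesting links.json is consumed by a Python PageRank
// script (an inference from the key names, not stated in the source).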
var crypto = require('crypto');
var fs = require('fs');

var h = crypto.createHash('sha256');

// addListener is the older alias of on(); the script hashes its own source.
var inStream = fs.ReadStream(__filename);
inStream.addListener('data', function(data) {
  h.update(data);
});
inStream.addListener('end', function() {
  console.log(h.digest('hex') + '\n');
});
function load_host_config (filename, callback) {
  var data = []
  var hash = crypto.createHash('sha1')
  var stream = fs.ReadStream(filename);

  stream.on('data', function (chunk) {
    hash.update(chunk)
    data.push(chunk)
  })

  stream.on('end', function() {
    var config = { addresses: {}, subnets: {} }
    var port

    Buffer
      .concat(data)
      .toString()
      .split('\n')
      .map(function (line) {
        // Strip comments, collapse whitespace, and normalize "Key = value".
        return line
          .replace(/#.*/, '')
          .replace(/\s+/g, ' ')
          .replace(/^ | $/g, '')
          .replace(/ = /, ' ')
      })
      .forEach(function (line) {
        line = line.split(' ')
        switch (line[0]) {
          // Address = address [port]
          case 'Address':
            // Note: we cannot set default port here, as Port doesn't have to
            // be specified before Address in the host configuration.
            config.addresses[line[1]] = Number(line[2])
            break

          // Port = port (655)
          case 'Port':
            port = Number(line[1])
            break

          // Subnet = address[/prefixlength[#weight]]
          case 'Subnet':
            line[1] = line[1].split('#')
            var subnet = line[1][0]
            var weight = Number(line[1][1])
            if (isNaN(weight)) { weight = 10 }
            config.subnets[subnet] = weight
            break
        }
      })

    // finalize: fill in the default port for addresses that didn't give one.
    var addresses = Object.keys(config.addresses)
    if (addresses.length > 0) {
      addresses.forEach(function (address) {
        if (isNaN(config.addresses[address])) {
          // Fixed: the original hard-coded 655 and never used the Port
          // directive captured above; prefer it, falling back to tinc's
          // default port of 655.
          config.addresses[address] = port || 655
        }
      })
    }

    callback(null, config, hash.digest('hex'))
  })

  // TODO on('error')
}
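// Hypothetical usage of the function above (the hosts-file path is
// illustrative, not from the source); assumes the same scope, where crypto
// and fs are already required:
load_host_config('/etc/tinc/mynet/hosts/nodeA', function (err, config, sha1) {
  if (err) { return console.error(err) }
  console.log('addresses:', config.addresses)
  console.log('subnets:', config.subnets)
  console.log('checksum:', sha1)
})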