.then(resp => { switch (argv.format) { case 'json': LOGGER.info(JSON.stringify(resp.data)); break; case 'csv': default: const opts = { header: true }; if (resp.data.events) { const events = resp.data.events.map(event => { // eslint-disable-next-line no-unused-vars let { __typename, ...mappedEvent } = event; return mappedEvent; }); if (events.length > 0) { csvify(events, opts, (err, output) => { LOGGER.info(output); }); } } const view = resp.data.view; if (view) { csvify(view.lines, opts, (err, output) => { LOGGER.info(`Event Summary: ${view.name}`); LOGGER.info('----------------------------------------------------'); LOGGER.info(output); }); } break; } });
// Stream sink: collects filter metadata for the observations sheet, then
// appends each chunk to the CSV file (with a header on the first row).
objectStream._write = function (chunk, encoding, callback) {
  if (observationsCsv) {
    // save some info to help produce (filter results) in other generated csv files
    sites.add(chunk.site_id);
    individuals.add(chunk.individual_id);
    var observedByPersonIds = chunk.observedby_person_id;
    if (observedByPersonIds) {
      if (observedByPersonIds.split) {
        // String form like "'1','2'": split on commas and strip quotes.
        var ids = observedByPersonIds.split(',');
        for (var i = 0; i < ids.length; i++) {
          observers.add(ids[i].replace(/'/g, ""));
        }
      } else {
        observers.add(observedByPersonIds);
      }
    }
    groups.add(chunk.observation_group_id);
  }
  csvStringify([chunk], { header: firstRow_1 }, function (err, data) {
    // Propagate serialization failures; the original ignored `err` and
    // would have appended `undefined` to the file.
    if (err) return callback(err);
    if (firstRow_1) {
      data = headerMappings_1.renameHeaders(sheetName, data);
      headerWrote_1 = true;
      firstRow_1 = false;
    }
    fs.appendFileSync(csvPath_1, data);
    callback();
  });
};
module.exports = function csv(transactions, options, callback) { var head = [ 'Date', 'Payee', 'Amount', 'Category', 'Currency', 'Rate', 'Comments', 'Number', ]; stringify([head].concat(transactions.map(function (transaction) { return [ transaction.date, transaction.payee, (transaction.localAmount / 100).toFixed(2), transaction.category, transaction.currency || '', transaction.rate || 1, transaction.memo, transaction.id ]; })), { delimiter: options.delimiter }, callback); };
fs.readFile(f, function (eread, filedata) {
  if (eread) {
    console.log(eread.code);
    if (eread.code == 'ENOENT') {
      // just means no file — fall through and generate it below
    } else {
      // Rethrow the original error object; wrapping it in `new Error(eread)`
      // stringified it and destroyed the stack trace and error code.
      throw eread;
    }
  }
  if (filedata !== undefined) {
    var memo = JSON.parse(filedata);
    stringify(arrayifier(memo), function (e, arr) {
      // Surface serialization failures; the original ignored `e` and would
      // have ended the response with `undefined`.
      if (e) {
        console.error(e);
        return res.end();
      }
      res.end(arr);
    });
    // if json file exists, and writing csv, can bail on the rest, all done here
    return null;
  }
  // else, create the file, and let the handler dump as csv or json
  fractions_handler_one_hour(hpmsgrids[task.year], task, generated_response_handler);
  return null;
});
.exec(function (err, clicks) { if (err) { return next(Error.create('An error occurred trying get the page\'s report.', {id: req.params.id}, err)); } var header = ['IP', 'Hora', 'Dispositivo', 'Pagina', 'Referencia', 'Agent']; var dataArray = _.map(clicks, function (click) { return [ click.ipAddress, getLocalTimeFormat(click.timestamp, req.company.timezone), click.device, click.page.name, click.valueReference, click.agent ]; }); dataArray.unshift(header); stringify(dataArray, {delimiter: ';'}, function (err, data) { if (err) { return next(Error.create('An error occurred trying get the page\'s report.', {id: req.params.id}, err)); } res.set('Content-Type', 'text/csv'); res.setHeader('Content-disposition', 'attachment; filename=' + clicks[0].page.name + ' ' + getLocalTimeFormat(new Date(), req.company.timezone) + '.csv'); res.send(data); }); });
// loadComments streams every batch of the user's comments into the archive
// as a CSV file, paginating until the source is exhausted, then closes the
// CSV stream.
async function loadComments(ctx, userID, archive, latestContentDate) {
  // CSV writer whose output is appended to the archive as a file.
  const csv = stringify();
  archive.append(csv, { name: 'comments-export/my_comments.csv' });

  // Header row.
  csv.write([
    'Comment ID',
    'Published Timestamp',
    'Article URL',
    'Comment Link',
    'Comment Text',
  ]);

  // Page through the batches, starting from the date carried by the token
  // and following each page's end cursor.
  let cursor = latestContentDate;
  let connection;
  do {
    connection = await loadCommentsBatch(ctx, csv, { cursor, userID });
    cursor = connection.endCursor;
  } while (connection.hasNextPage);

  csv.end();
}
// Run the experiments and persist the collected timings as CSV.
(function main () {
  var times = launchExperiments();
  // (A commented-out copy of this block targeting 'serializing.csv' was
  // removed as dead code.)
  stringify(times, function (e, csv) {
    if (e) {
      return console.log(e);
    }
    fs.writeFile('deserializing.csv', csv, function (e) {
      if (e) {
        return console.log(e);
      }
      console.log('Done!');
    });
  });
})();
// Decode a base64 CSV payload, parse it, normalize date/time values, and
// re-serialize it as CSV, passing the result to `next(err, csv)`.
var convert = function (model, data, next) {
  // Buffer.from replaces the deprecated `new Buffer` constructor.
  var text = Buffer.from(data, 'base64').toString('utf-8');
  var options = {
    delimiter: model.options.separator,
    convertToTypes: {
      convert: true,
      decimalDelimiter: model.options.decimalDelimiter,
      dateFormat: model.options.dateFormat
    },
    header: false
  };
  var records = csvparse(text, options);
  var csv = '';
  var failed = false;
  var stringifier = stringify();
  stringifier.on('readable', function () {
    var row = stringifier.read();
    while (row) {
      csv += row;
      row = stringifier.read();
    }
  });
  stringifier.on('error', function (err) {
    // Propagate the failure to the caller; the original only logged it.
    failed = true;
    console.log(err.message);
    next(err);
  });
  // Hand back the result only after the stream has flushed. The original
  // called next(null, csv) synchronously after end(), racing the stream and
  // risking a truncated result.
  stringifier.on('finish', function () {
    if (!failed) next(null, csv);
  });
  for (var i = 0; i < records.data.length; i++) {
    stringifier.write(normalizeDateTime(records.data[i]));
  }
  stringifier.end();
};
// Write `data` to `streamOut` as CSV. An object template maps keys to
// header labels and selects the columns; an array template is the header
// row; a transformer maps each row before writing.
CSVGen.writeStream = function (data, streamOut, options) {
  // Declared with `var`: the original leaked `stringifier` as an implicit global.
  var stringifier = stringify(options);
  stringifier.pipe(streamOut);
  if (options.template && typeof options.template == 'object') {
    if (Object.prototype.toString.call(options.template) != '[object Array]') {
      // Object template: values form the header row; keys pick and order
      // the fields pulled from each data row.
      var header = [];
      for (var t in options.template) header.push(options.template[t]);
      stringifier.write(header);
      for (var i in data) {
        var row = [];
        for (var k in options.template) row.push(data[i][k]);
        stringifier.write(row);
      }
      stringifier.end();
      return;
    }
    // Array template is written verbatim as the header row.
    stringifier.write(options.template);
  }
  if (options.transformer && typeof options.transformer == 'function') {
    for (var j in data) stringifier.write(options.transformer(data[j]));
  } else {
    for (var m in data) stringifier.write(data[m]);
  }
  stringifier.end();
};
// Serialize `contentObj` to CSV, write it to `__dirname + fileName`, and
// answer the HTTP request with the outcome.
var writer = function _writeFile(fileName, contentObj, res) {
  try {
    process.stdout.write('write File entered: ' + __dirname + fileName + '\n');
    csvWrite(contentObj, (err, csvdata) => {
      process.stdout.write('write File csvWrite: ' + __dirname + fileName + '\n');
      // Return after responding: the original fell through after
      // res.send(err), risking a double response / crash.
      if (err) return res.send(err);
      console.log(csvdata);
      fs.writeFile(__dirname + fileName, csvdata, (err) => {
        process.stdout.write('write File write: ' + __dirname + fileName + '\n');
        if (err) return res.send(err);
        res.send("saving ok.");
      });
    });
  } catch (err) {
    console.error(err);
    res.send(err);
  }
};
return new Promise(function (resolve, reject) { // filter out unused params, beautify param keys, and convert array values to comma separated strings var paramsArray = [["Parameter", "Setting"]]; for (var key in params) { if (key === 'species_id' || key === 'additional_field' || key === 'request_src' || key === 'bottom_left_x1' || key === 'bottom_left_y1' || key === 'upper_right_x2' || key === 'upper_right_y2' || key === 'ancillary_data' || key === 'dataset_ids') continue; if (params.hasOwnProperty(key) && params[key] && params[key] != [] && params[key] != '') { if (params[key] instanceof Array) paramsArray.push([npnPortalParams_1.paramNamesBeautified[key], params[key].join(', ')]); else paramsArray.push([npnPortalParams_1.paramNamesBeautified[key], params[key]]); } } // write the csv file and resolve promise with the created csv's filename var searchParametersCsvFileName = 'search_parameters' + requestTimestamp.toString() + '.csv'; var searchParametersCsvPath = config.get('save_path') + searchParametersCsvFileName; csvStringify(paramsArray, function (err, output) { fs.appendFile(searchParametersCsvPath, output, function (err) { if (err) { reject(err); } else { resolve(searchParametersCsvFileName); } }); }); });
function(callback){ stringify(data, function(err, output){ if(err){ return callback(err); } callback(null, output); }); },
return new Promise((resolve, reject) => { csv_stringify(data, { header: true, quoted: true, qoutedString: true }, (err, out) => { if (err) { reject(err); } else { resolve(out); } }); });
return new Promise(function (resolve, reject) { csv_stringify(volunteers, { header: true, escape: true }, function (err, out) { if (err) { return reject(err); } else { return resolve(out); } }); });
// Example helper: serialize `data` to CSV and print the result (or the error).
function csvSample(data) {
  var stringify = require('csv-stringify');
  stringify(data, function (err, output) {
    if (err) {
      console.log("Stringify didn't work.", err);
      return;
    }
    console.log(output);
  });
}
// Serialize `list` to CSV (no header) and append it to
// ./csv_files/<stateName>/<filename>.csv, creating the directory if needed.
var writeToFile = function (stateName, filename, list) {
  var File = './csv_files/' + stateName + "/" + filename + '.csv';
  var dir = './csv_files/' + stateName;
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir);
  }
  // stringify is asynchronous and returns undefined, so the original
  // `var content = stringify(...)` captured nothing and was removed.
  stringify(list, { header: false }, function (err, output) {
    if (err) return console.error(err); // was silently ignored
    fs.appendFileSync(File, output, { encoding: 'utf8' });
  });
};
// Stream the recorder's samples to the client as a CSV attachment.
app.get('/recorder/download', function (req, res, next) {
  res.setHeader('Content-disposition', 'attachment; filename=dc-electronic-load-data.csv');
  res.contentType('text/csv');
  res.write("Time,Volts (mV),Amps (mA),Set Amps (mA)\n");
  // The callback parameter is renamed so it no longer shadows the outer
  // `data`; the unused `result` local was removed.
  csv(data, function (err, body) {
    if (err) return next(err); // was silently ignored
    res.end(body);
  });
});
exports.mergeStreetName = function mergeStreetName(source, cachedir, callback){ var debug = require('debug')('conform:csv:mergeStreetName'); debug("Merging Columns"); var cols = source.conform.merge.slice(0); var loc = cachedir + source._id + "/out.csv"; if (fs.exists('./tmp.csv')) fs.unlinkSync('./tmp.csv'); var instream = fs.createReadStream(loc); var outstream = fs.createWriteStream('./tmp.csv'); var stringifier = stringify(); var parser = parse({ relax: true }); parser.on('error', function(err) { debug(err); }); var linenum = 0; var mergeIndices = []; var transformer = transform(function(data) { linenum++; if (linenum === 1) { lowerData = data.map(function(x) { return x.toLowerCase(); } ); cols.forEach(function(name, i) { mergeIndices.push(lowerData.indexOf(name.toLowerCase())); }); data.push('auto_street'); return data; } else { var pieces = []; mergeIndices.forEach(function(index) { pieces.push(data[index]); }); data.push(pieces.join(' ')); return data; } }); outstream.on('close', function() { fs.rename('./tmp.csv', loc, function(err){ callback(err); }); }); instream .pipe(parser) .pipe(transformer) .pipe(stringifier) .pipe(outstream); };
module.exports = function(report, items, options, services, callback) { var data = tabular(report, items, options, services); var stream = csv({ columns: data.headers, header: true }); callback(null, stream); // Write CSV rows _.each(data.rows, _.bindKey(stream, 'write')); stream.end(); };
// Write each entry of `data` to its own tab-separated, Windows-1252-encoded
// file named tucan-<index>.txt under `dir`.
data.forEach((f, index) => {
  stringify(f, { delimiter: '\t' }, function (err, output) {
    // Record serialization failures; the original ignored `err` and also
    // shadowed the outer `data` with its callback parameter.
    if (err) return errors.push(err);
    var name = dir + '/tucan-' + index + '.txt';
    output = encoding.convert(output, 'windows1252', 'utf8');
    fs.writeFile(name, output, (err) => {
      if (err) {
        errors.push(err);
      }
    });
  });
});
.then( function( cards ) { if ( options.json ) { console.log( JSON.stringify( cards ) ); } else { stringify( cards, { header: true }, function( err, csvOutput ) { if ( err ) { console.error( "err:", err ); } console.log( csvOutput ); } ); } }, function( err ) {
// closure around request object: reduce the task's grid cells and respond
// as CSV or JSON depending on the requested format.
function generated_response_handler(e, t) {
  if (e) return next(e);
  // iterate over each grid cell
  var memo = reduce.reduce_one_hour({}, task);
  if (csv_test.test(req.params.format)) {
    stringify(arrayifier(memo), function (e2, arr) {
      if (e2) return next(e2); // was silently ignored
      res.end(arr);
    });
  } else {
    return res.json(memo);
  }
  return null;
}
// Pipe `outputStream` through a CSV stringifier into a uniquely-named report
// file, invoking `callback(fileName)` once the file is fully written.
function createCSVFile(outputStream, reportColumns, callback) {
  var csvStringifier = csvStringify({ header: true, columns: reportColumns });
  var fileName = 'report_' + shortid.generate() + '.csv';
  var writeStream = fs.createWriteStream('./report_files/' + fileName, {defaultEncoding: 'utf8'});
  outputStream.pipe(csvStringifier).pipe(writeStream);
  // Signal completion only when the file has been flushed: the source
  // stream's 'end' (used originally) fires before the write stream finishes,
  // so consumers could read a truncated file.
  writeStream.on('finish', function () {
    callback(fileName);
  });
  writeStream.on('error', function (err) {
    log.error(err);
  });
}
.exec(function (err, clicks) { if (err) { return next(Error.create('An error occurred trying get the page\'s report.', {id: req.params.id}, err)); } var header = ['IP', 'Hora', 'Dispositivo', 'Pagina', 'Referencia', 'Agent']; _.forEach(_.range(1, 16), function (x) { header.push('Param ' + x); }); var dataArray = _.map(clicks, function (click) { var value = _.find(values, function (x) { return x._id.equals(click.valueReference); }) || {}; return [ click.ipAddress, getLocalTimeFormat(click.timestamp, req.company.timezone), click.device, click.customPage.name, click.valueReference, click.agent, value.parameter1, value.parameter2, value.parameter3, value.parameter4, value.parameter5, value.parameter6, value.parameter7, value.parameter8, value.parameter9, value.parameter10, value.parameter11, value.parameter12, value.parameter13, value.parameter14, value.parameter15 ]; }); dataArray.unshift(header); stringify(dataArray, {delimiter: ';'}, function (err, data) { if (err) { return next(Error.create('An error occurred trying get the page\'s report.', {id: req.params.id}, err)); } res.set('Content-Type', 'text/csv'); res.setHeader('Content-disposition', 'attachment; filename=' + clicks[0].customPage.name + ' ' + getLocalTimeFormat(new Date(), req.company.timezone) + '.csv'); res.send(data); }); });
// Serialize `data` to CSV and write it to `path`, reporting the outcome via
// a node-style callback.
var writeCSV = function (path, data, callback) {
  stringify(data, function (error, output) {
    // Return after reporting: the original fell through after
    // callback(error) and could invoke the callback twice.
    if (error) return callback(error);
    fs.writeFile(path, output, function (error) {
      if (error) return callback(error);
      callback(null);
    });
  });
};
return req.session.destroy(function(err){ if(err){ return res.status(500).send('Something went wrong.'); } else { if (!result) return res.status(400).send('No result session available.'); stringify(result, (err, data) => { if (err) { return res.status(404).send('Failed converting result to CSV.'); } res.setHeader('Content-Disposition', 'attachment; filename=result.csv'); res.set('Content-Type', 'text/csv'); return res.status(200).send(data); }); } });
.await(function(e2,t){ var memo if(e2) return next(e2) memo = reduce.reduce({},task) if(csv_test.test(req.params.format)){ // respond with csv res.writeHead(200, { 'Content-Type': 'text/csv' }) stringify(arrayifier(memo),function(e3,arr){ if(e3) throw new Error(e3) res.end(arr) }) return null }else{ return res.json(memo) } })
}).then(function(data){ // filter out invalud statuses data = chargebeeStatus(data) // if start and end segment out dates data = (start) ? chargebeeDate(data, start, endUsage) : data // log count var counts = chargebeeCountStatus(data); _.each(counts, function(count, key){ debug("%s %d", key, count); }) // only keep the columns we need data = chargebeeColumns(data, columns) // turn the object back into CSV string return stringify(data, {header: true}) }).then(function(data){
// Tally reports per (nodeId, paymentAddress) pair and serialize the counts
// to CSV via the node-style `callback`.
function _convertToCSV(reportsArray, callback) {
  const counts = {};
  reportsArray.forEach((report) => {
    const { contactNodeId, paymentAddress } = report;
    const key = `${contactNodeId}:${paymentAddress}`;
    counts[key] = (counts[key] || 0) + 1;
  });

  const rows = [['nodeId', 'paymentAddress', 'reportCount']];
  Object.keys(counts).forEach((key) => {
    const [nodeId, paymentAddress] = key.split(':');
    rows.push([nodeId, paymentAddress, counts[key]]);
  });

  return toCSV(rows, callback);
}
// Verifies that dump_header writes the header row to the stringifier only
// once, even when the returned dumper is invoked twice.
it('should dump the header, only once',function(done){
  var dumper,header,stringifier,out
  header = ['foo', 'bar.foo', 'bar.bar.foo', 'bar.bar.bar.0', 'bar.bar.bar.1', 'bar.bar.bar.2', 'bar.bar.bar.3', 'bar.bar.bar.4', 'bar.bar.bar.5', 'bar.bar.bar.6', 'bar.bar.baz', 'bar.baz', 'baz']
  stringifier = stringify()
  // Pipe the CSV output into a temp file so it can be read back below.
  out = fs.createWriteStream(file, { encoding: 'utf8' })
  stringifier.pipe(out)
  dumper = dump_header(header)
  // Invoke the dumper twice; only the first call should emit the header.
  dumper(stringifier,function(){
    dumper(stringifier,function(){
      // should have only written the first
      //stringifier.end()
      out.end()
      return null
    })
    return null
  })
  stringifier.on('finish', function() {
    out.end()
    return null
  })
  out.on('finish',function(){
    // load the file, make sure it contains exactly one header line
    fs.readFile(file,{encoding:'utf8'},function(err,data){
      data.should.eql('foo,bar.foo,bar.bar.foo,bar.bar.bar.0,bar.bar.bar.1,bar.bar.bar.2,bar.bar.bar.3,bar.bar.bar.4,bar.bar.bar.5,bar.bar.bar.6,bar.bar.baz,bar.baz,baz\n') // eslint-disable-line max-len
      return done()
    })
    return null
  })
  return null
})