// Zip the contents of appSource into `output` via `archive`, honoring the
// ignore patterns in an optional .cfignore file. Invokes callback(err) on
// walker failure, callback(null) once the archive stream has closed.
fs.access(path.join(appSource, '.cfignore'), fs.constants.R_OK, (err) => {
  var cfignore = null;
  if (!err) {
    // .cfignore exists and is readable: compile its ignore rules.
    cfignore = parser.compile(fs.readFileSync(path.join(appSource, '.cfignore'), 'utf8'));
  }
  filewalker(appSource)
    .on('file', (p, s) => {
      if (p === '.cfignore') return; // never package the ignore file itself
      if (!cfignore || cfignore.accepts(p)) {
        // Preserve the original file mode inside the archive.
        var stat = fs.statSync(path.join(appSource, p));
        archive.append(fs.createReadStream(path.join(appSource, p)), { name: p, mode: stat.mode });
      }
    })
    .on('error', (err) => { callback(err); })
    .on('done', () => {
      // Register 'close' BEFORE piping/finalizing so the event cannot fire
      // before the listener is attached.
      output.on('close', () => { callback(null); });
      archive.pipe(output);
      archive.finalize();
    })
    .walk();
});
function(doneWithDirectory) { var files = []; filewalker(contentPath) .on('file', function(path, stats, absPath) { if(typeof myOptions.testToInclude === 'function') { //test if we should include file if(myOptions.testToInclude.apply(self, [path, stats, absPath])) { //rewrite URL path if(typeof myOptions.rewriteUrlPath === 'function') { var urlPath = myOptions.rewriteUrlPath.apply(self, [path, stats, absPath]); } else { var urlPath = path; } //push file to be processed files.push({ urlPath: urlPath, path: path, absPath: absPath }); } } else { self.emitError('You must set a function to staticSiteLoader.testToInclude in your webpack config to use this loader.') } }) .on('done', function() { doneWithDirectory(null, files); }) .walk(); },
/**
 * Scaffold a new project from a named template directory.
 * Validates inputs, loads config.json, renders every template file through
 * handlebars with the config as context, and writes the output into a newly
 * created project directory. Validation failures log to console.error and
 * abort early.
 * @param {string} templateName  - template to use; must be in templateNames
 * @param {string} projectName   - target directory name (must not exist yet)
 * @param {string[]} templateNames - list of valid template names
 */
function main(templateName, projectName, templateNames) {
  if (templateNames.indexOf(templateName) === -1) {
    return console.error('Invalid template name provided! Pick one from these: ' + templateNames.join(', '));
  }
  if (!projectName) return console.error('No project name provided!');

  var templatePath = path.join(__dirname, templateName);
  var confPath = path.join(__dirname, '..', 'config.json');

  if (!fs.existsSync(confPath)) return console.error('Missing configuration! Create .mankees/config.json!');
  if (fs.existsSync(projectName)) return console.error('Project exists already!');

  var conf = require(confPath);
  // Expose the project name to the templates.
  conf.project = projectName;

  console.log('Creating directory: ' + projectName);
  fs.mkdirSync(projectName);

  console.log('Injecting data to templates and writing output');
  walker(templatePath)
    .on('file', function(p) {
      var fp = path.join(templatePath, p);
      // Render each template file with the config, then write it into the
      // new project directory under the same relative path.
      var data = handlebars.compile(fs.readFileSync(fp, {encoding: 'utf8'}))(conf);
      var op = path.join('.', projectName, p);
      console.log('Writing ' + op);
      fs.writeFileSync(op, data);
    })
    .on('error', function(err) {
      // Previously walker errors were silently unhandled.
      console.error(err);
    })
    .walk();
}
// Scan ./public/examples and build a { category: [itemNames] } index:
// files at the root go into the synthetic 'none' category, files inside a
// subdirectory use that directory name as their category. The index is then
// rendered into public/js/examples.js via the examples template.
function generateExamples() {
  var categories = {};
  filewalker('./public/examples')
    .on('file', function(p) {
      var segments = p.split('/');
      var nested = segments.length > 1;
      var category = nested ? segments[0] : 'none';
      // Strip the file extension to get the item name.
      var item = (nested ? segments[1] : segments[0]).split('.')[0];
      if (!(category in categories)) {
        categories[category] = [];
      }
      categories[category].push(item);
    })
    .on('done', function() {
      writeTemplate(
        categories,
        path.join(__dirname, 'public/js/examples.js'),
        path.join(__dirname, '_templates/examples.hbs')
      );
    })
    .walk();
}
// Collect directory names under `root`, skipping node_modules and
// dot-directories, then hand the list to cb.
function getTemplateNames(root, cb) {
  var names = [];
  walker(root)
    .on('dir', function(p) {
      var isNodeModules = p.indexOf('node_modules') === 0;
      var isHidden = p[0] === '.';
      if (!isNodeModules && !isHidden) {
        names.push(p);
      }
    })
    .on('done', function() {
      cb(names);
    })
    .walk();
}
// One-shot: take the first metadata snapshot from metadataStore, then walk the
// music library and emit "loadMetadata" actions for audio files that are not
// yet in the stored metadata.
metadataStore.take(1).observe(storedMetadata=>{
  // maxPending:-1 lifts filewalker's concurrency limit.
  var finder = filewalker("/Users/thomash/Documents/organised/electronica",{maxPending:-1});
  // Resume the walker whenever a consumer asks for more work.
  // NOTE(review): `finder.resume` is passed unbound — confirm filewalker's
  // resume() does not depend on `this`, otherwise wrap it in an arrow.
  actionStream.filter(a => a.get("type")==="sendMeMore").observe(finder.resume);
  // storedMetadata = storedMetadata[1];
  console.log("stored before",storedMetadata);
  // Stream of 'file' events ([relPath, stats, absPath] tuples — presumably;
  // verify against filewalker's event signature), filtered down to supported
  // audio extensions that are not already present in the stored metadata.
  var toTagStream = most.fromEvent("file", finder)
    .tap(log("fileEvent"))
    .filter(([path]) => (extensions.reduce((hasExtension, ext) => path.toLowerCase().endsWith(ext) || hasExtension, false)))
    .filter(([filename,stats,path]) => !storedMetadata.has(path))
  // Only tag files with a matching ".asd" sidecar (path[2] is the absolute
  // path); throttle to avoid flooding downstream, then emit load actions.
  actionStream.plug(toTagStream.filter(path=>fs.existsSync(path[2]+".asd"))
    .bufferedThrottle(500)
    .map(path => Imm.Map({path:path[2], type:"loadMetadata"}))
    // .tap(console.log.bind(console))
    // .tap(finder.pause)
  )
  finder.walk();
});
/**
 * Recursively walk `dir` and, in every file whose basename matches the
 * minimatch glob `fileFilter`, replace `searchPattern` with `replaceStr`.
 * @param {string} dir           - root directory to walk
 * @param {string} fileFilter    - minimatch glob applied to each basename
 * @param {RegExp|string} searchPattern - pattern handed to readReplaceWrite
 * @param {string} replaceStr    - replacement text
 * @param {string} [fileEncoding='utf8'] - encoding for read/write
 * @param {function} [callback]  - invoked with the number of matched files
 */
var replace = function(dir, fileFilter, searchPattern, replaceStr, fileEncoding, callback) {
  if (typeof fileEncoding === 'undefined') fileEncoding = 'utf8';
  // Closure-local counter. Previously this was lazily created as
  // `this.matches` inside the 'file' handler, so when no file matched, the
  // 'done' handler logged `undefined` and passed `undefined` to the callback.
  var matches = 0;
  filewalker(dir)
    .on('file', function(p, s) {
      if (minimatch(path.basename(p), fileFilter)) {
        log('MATCH file: '+ p +' ['+ (s.size/1024).toFixed(2)+' Kb]');
        matches++;
        readReplaceWrite(p, searchPattern, replaceStr, fileEncoding, errorHandler);
      }
    })
    .on('error', function(err) {
      exit("ERROR! Exiting due to: "+err);
    })
    .on('done', function() {
      // this.dirs / this.files are the walker's own traversal counters.
      log('Processed total of '+this.dirs + ' dirs, '+ this.files +' files. Matched '+matches+' files');
      if (typeof callback === 'function') callback(matches);
    })
    .walk();
};
/**
 * Read text file contents from a directory, indexed into an object.
 * Matching files are pushed onto `q` (a work queue declared elsewhere in
 * this module); its workers are expected to populate `indexedFilesContent`,
 * which is handed to `onLoaded` once the queue drains.
 * @param {string} dir - directory to scan
 * @param {Function} onLoaded - callback(err, indexedFilesContent)
 * @return {undefined}
 * @throws {Error} when dir is not a string or onLoaded is not a function
 */
function load(dir, onLoaded) {
  var config = module.exports.config
  if (typeof dir !== 'string') {
    throw new Error('Cannot load text files: Invalid directory')
  }
  if (typeof onLoaded !== 'function') {
    throw new Error('Cannot load text files: Invalid callback')
  }
  // Queue one work item per discovered file; the queue worker (not visible
  // in this excerpt) presumably reads the file and indexes its content.
  function addToQueue(p, s, fullPath) {
    q.push({ fullPath: fullPath, dir: dir })
  }
  function handleError(err) {
    onLoaded(err)
  }
  // Installed on 'done': once all files are queued, fire the callback when
  // the queue empties. NOTE(review): if the queue drains before 'done' fires
  // (or the directory has no matching files), `drain` may never run —
  // confirm the queue implementation covers the empty/already-drained case.
  function onAllFilesFound() {
    q.drain = function () {
      onLoaded(null, indexedFilesContent)
    }
  }
  // NOTE(review): `indexedFilesContent` is assigned without var/let — either
  // a module-level variable declared outside this excerpt or an accidental
  // global; verify and declare it explicitly if missing.
  indexedFilesContent = {}
  filewalker(dir, { recursive: config.recursive, matchRegExp: config.matchRegExp })
    .on('file', addToQueue)
    .on('error', handleError)
    .on('done', onAllFilesFound)
    .walk()
}
var fs = require('fs'); var S = require('string'); var _ = require('underscore'); var mkdirp = require('mkdirp'); var filewalker = require('filewalker'); var args = process.argv; var allFiles = []; var basicPath = './' + args[2] + '/'; var destinationPath = './' + args[3] + '/'; var summary = {}; var summaryArray = []; filewalker(basicPath) .on('dir', function(p) { //console.log('dir: %s', p); }) .on('file', function(p, s) { //console.log('file: %s, %d bytes', p, s.size); if (S(p).right(5).s == '.html') { allFiles.push(p); parseFile(p); console.log(p + ' parsed.'); }else{ console.log(p); } }) .on('error', function(err) { console.error(err);
const filewalker = require('filewalker'), fs = require('fs'), path = require('path'); var options = {}; options['matchRegExp'] = /\.py$/; var lines = []; var tmpl = fs.readFileSync('noapi.html').toString('utf8'); var last = 0; filewalker(process.argv[2], options) .on('file', function(p, s) { if (p.search(/(api|alembic)\//) == 0) return; var matches = []; s = fs.readFileSync(path.join(process.argv[2], p)).toString('utf8'); s = s.split('\n'); for(var i in s) { if (s[i].search(/(db\.|dbget\()/) >=0) { var clazz, method; for(var j = i-1; j>=0; j--) { var l = s[j]; if (!method && (method = l.match(/def\s+(\w+)/))) { method = method[1]; if (method[0] == l[0]) break; } else if (clazz = l.match(/class\s+(\w+)/)) { clazz = clazz[1]; break;
// For every bundle in bundleList: walk the database root, find the bundle's
// media file, annotation (JSON) file and SSFF track files by path pattern,
// inline their contents (base64 for binary, parsed JSON for the annotation),
// and write the assembled bundle out as <name>_bndl.json under destination.
bundleList.forEach(function (el) {
  console.log(el.name);
  var bundle = {};
  bundle.ssffFiles = [];
  filewalker(pathToDbRoot)
    .on('dir', function (p) {})
    .on('file', function (p) {
      // Expected layout: <session>_ses/<name>_bndl/<file>. The commented-out
      // patterns below are the older SES-style layout.
      // var pattMedia = new RegExp('^SES[^/]+/' + el.name + '/[^/]+' + dbConfig.mediafileExtension + '$');
      var pattMedia = new RegExp('^.+_ses+/' + el.name + '_bndl' + '/[^/]+' + dbConfig.mediafileExtension + '$');
      // var pattAnnot = new RegExp('^SES[^/]+/' + el.name + '/[^/]+' + 'json' + '$');
      var pattAnnot = new RegExp('^.+_ses+/' + el.name + '_bndl' + '/[^/]+' + 'json' + '$');
      // read media file
      if (pattMedia.test(p)) {
        console.log(p);
        bundle.mediaFile = {};
        bundle.mediaFile.encoding = 'BASE64';
        bundle.mediaFile.filePath = p;
      }
      // read annotation file
      if (pattAnnot.test(p)) {
        bundle.annotation = {};
        bundle.annotation.filePath = p;
      }
      // read ssffTracks
      for (var i = 0; i < dbConfig.ssffTracks.length; i++) {
        // var pattTrack = new RegExp('^SES[^/]+/' + el.name + '/[^/]+' + dbConfig.ssffTracks[i].fileExtension + '$');
        var pattTrack = new RegExp('^.+_ses+/' + el.name + '_bndl' + '/[^/]+' + dbConfig.ssffTracks[i].fileExtension + '$');
        if (pattTrack.test(p)) {
          bundle.ssffFiles.push({ ssffTrackName: dbConfig.ssffTracks[i].name, encoding: 'BASE64', filePath: p });
        }
      }
    })
    .on('error', function (err) {
      console.log(err);
      return;
    })
    .on('done', function () {
      console.log(bundle);
      // Inline the media file as base64 and drop the now-redundant path.
      // NOTE(review): throws if no media/annotation file matched the walk.
      bundle.mediaFile.data = fs.readFileSync(pathToDbRoot + bundle.mediaFile.filePath, 'base64');
      delete bundle.mediaFile.filePath;
      bundle.annotation = JSON.parse(fs.readFileSync(pathToDbRoot + bundle.annotation.filePath, 'utf8'));
      // delete bundle.annotation.filePath;
      console.log(bundle.ssffFiles)
      // NOTE(review): assumes exactly one matched file per configured track,
      // in the same order as dbConfig.ssffTracks — confirm this holds for
      // every database layout.
      for (var i = 0; i < dbConfig.ssffTracks.length; i++) {
        bundle.ssffFiles[i].data = fs.readFileSync(pathToDbRoot + bundle.ssffFiles[i].filePath, 'base64');
        delete bundle.ssffFiles[i].filePath;
      }
      console.log('##########################');
      console.log('done');
      // fs.writeFileSync('/Users/raphaelwinkelmann/Desktop/bundle.json', JSON.stringify(bundle, undefined, 0));
      var wstream = fs.createWriteStream(destination + dbName + '/' + el.name + '_bndl.json');
      wstream.write(JSON.stringify(bundle, undefined, 1));
      wstream.end();
    })
    .walk();
});
// Package the contents of ./resources into book.epub (a zip archive).
var fs = require('fs');
var filewalker = require('filewalker');
var archiver = require('archiver');

var output = fs.createWriteStream('book.epub');
var archive = archiver('zip', {zlib: {level: 9}});

archive.on('error', function(err) {
  throw err;
});

filewalker('resources')
  .on('file', function(p) {
    console.log('file: %s', p); // was mislabeled as 'dir: %s'
    // Add all files to the zip file. Compress all files, except mimetype
    // (the EPUB spec requires the mimetype entry to be stored uncompressed).
    // NOTE(review): EPUB also requires mimetype to be the FIRST entry in the
    // archive; filewalker's emission order does not guarantee that — verify
    // the produced file against an EPUB validator.
    archive.append(fs.createReadStream('./resources/'+p), { name: p, store: (p === 'mimetype') });
  })
  .on('error', function(err) {
    console.error(err);
  })
  .on('done', function() {
    console.log('Found: %d dirs, %d files, %d bytes', this.dirs, this.files, this.bytes);
    archive.finalize(function(err, bytes) {
      if (err) { throw err; }
      console.log('Done. Compressed to ' + bytes + ' bytes');
    });
  })
  .walk();
mv = require('mv'), basepath = '/home/bradgnar/ENC_ROOT', xml_fileholder = '/home/bradgnar/ENC_ROOT/all_xml_files', fileName, fileBeningRead, fileBeingWritten, child, options = { maxPending: 10 }; console.log(process.cwd()); process.chdir(basepath); filewalker('.', options) .on('dir', function(p){ //console.log('dir: %s', p); }) .on('file', function(relPath, stats, absPath){ var pathObj, execString; if(isCorrectFileType(absPath)){ pathObj = makePaths(absPath); execString = 'ogr2ogr -skipfailures -f "kml" ' + pathObj.newFile + ' ' + pathObj.fileName; child = exec(execString, {cwd: pathObj.basePath}, function(error, stdout, stderr){ if(error){ console.log(error.message + 'DDDDDDDD'); next();
passport = require('passport'), authConfig = require('./config/authentication') ; // Makes connection asynchronously. Mongoose will queue up database // operations and release them when the connection is complete. mongoose.connect(env.MONGO_URI, function (err) { if ( err ) { console.error('ERROR connecting to MongoDB: ' + env.MONGO_URI, err); return; } console.log('MongoDB successfully connected to: ' + env.MONGO_URI); }); // load mongo models filewalker(env.modelsPath, {matchRegExp: /.*\.js/i}).on('file', function(file){ require(path.join(env.modelsPath, file)); }).walk(); var app = express(), oneDay = 1 * 24 * 60 * 60 * 1000, oneYear = 365 * oneDay; // used in templates to get assets URLs app.locals.assetsURL = env.assetsURL; app.set('views', env.views); // configure template engine app.engine('html', nunjucks.render); nunjucks.configure(env.views, {