// Gulp task: copy static assets into the build dir (uglifying .js and
// minifying .css in transit), then scan the build output for page-level
// index.html files and render the site listing template with their names.
// FIX: loose equality (`== -1`, `!= 2`) replaced with strict comparisons.
gulp.task('list', function () {
  // Copy/minify the source assets into the build directory.
  gulp.src(dirs.source.copy)
    .pipe(gulpif(/[.](js)$/, uglify()))
    .pipe(gulpif(/[.](css)$/, csso()))
    .pipe(gulp.dest(dirs.build.html));

  // Collect the directory names of index.html files sitting exactly two
  // path separators deep — those are the standalone page entries.
  find.file(/\.html$/, dirs.build.html, function (files) {
    var names = [];
    var file;
    for (var i = 0; i < files.length; i++) {
      file = files[i];
      if (file.indexOf('index.html') === -1 || (file.match(/\//g) || []).length !== 2) {
        continue;
      }
      // Strip the build prefix so names are site-relative.
      names.push(path.dirname(file).replace('build/', ''));
    }

    // Render the listing page with the discovered page names.
    gulp.src(dirs.source.list)
      .pipe(plumber())
      .pipe(jade({
        pretty: true,
        locals: { pages: names }
      }))
      .pipe(gulp.dest(dirs.build.html));
  });
});
// Wrap the callback-style find.file(...) API in a Promise.
// Resolves with the matched paths made relative to `directory`, dropping
// entries whose relative path starts with '.' (hidden files/dirs);
// rejects if the traversal reports an error.
// NOTE(review): fragment — `args`, `directory`, `find` and `path` come
// from the enclosing function, which is outside this view.
return new Promise((resolve, reject) => {
  find.file(...args, files => {
    const relativeFiles = files
      .map(file => path.relative(directory, file))
      // Hidden entries are excluded after relativization, so a hidden
      // ancestor of `directory` itself does not exclude everything.
      .filter(file => file.charAt(0) !== '.');
    resolve(relativeFiles);
  }).error(err => reject(err));
});
/**
 * Scans `src` recursively for .js files and builds a workspace "fileset".
 *
 * @param {string} src - root directory to scan
 * @param {Array} includes - currently unused (see TODO below)
 * @param {Array} excludes - regexp source strings; any file matching one
 *                           of them is dropped from the result
 * @returns a promise resolving to {files, groups, length}
 */
var load = function (src, includes, excludes) {
    var def = deferred();

    // TODO, use includes array of regexps to add
    // files to the workspace insted of just .js
    find.file(/\.js$/, src, function (fileSet) {
        // Keep only files that match none of the exclusion rules.
        var kept = fileSet.filter(function (candidate) {
            return excludes.every(function (rule) {
                return !(new RegExp(rule).test(candidate));
            });
        });

        // Describe each surviving file (full path, folder, file name and
        // its doc path relative to src), sorted case-insensitively.
        var files = _.sortBy(kept.map(function (candidate) {
            var parts = PATH_REGEXP.exec(candidate);
            return {
                fullPath: parts[0],
                path: parts[1],
                name: parts[2],
                docPath: parts[1].replace(src, "")
            };
        }), function (entry) {
            return entry.fullPath.toLowerCase();
        });

        // Group files by their containing folder; each group carries the
        // folder's src-relative path plus dotted and underscored
        // namespace spellings (trailing separator sliced off).
        var byFolder = _.groupBy(files, function (entry) {
            return entry.path;
        });
        var groups = _.map(byFolder, function (members, folder) {
            var relative = folder.replace(src, "");
            return {
                path: relative,
                ns: relative.replace(/\//g, ".").slice(0, -1),
                ns_class: relative.replace(/\//g, "_").slice(0, -1),
                files: members
            };
        });

        // Resolve the promise returning a 'fileset' object
        def.resolve({
            files: files,
            groups: groups,
            length: files.length
        });
    });

    return def.promise;
};
// CLI action: scan `path` for files whose names match either the
// user-supplied --extension suffix or, by default, .htm / .html, then run
// every file that survives the blacklist and ignore-path filters through
// processFile.
// NOTE(review): fragment — this continues a commander-style chain; the
// receiver and `program`/filter helpers are defined outside this view.
// NOTE(review): `program.extension` is interpolated into the RegExp
// unescaped, so a value containing regex metacharacters changes the match.
.action((path) => {
  const regexPattern = program.extension
    ? `^(.*)${program.extension}$`
    : '^(.*).htm(l?)$';
  const regex = new RegExp(regexPattern);
  find.file(regex, path, (files) => {
    files
      .filter(filterBlacklisted)
      .filter(filterIgnorePath)
      .forEach(processFile);
  });
})
// Rebuilds the whole site: renders every tracked markdown file and then
// regenerates the Atom feed (atom.xml) from the post metadata.
function build() {
  find.file(/\.md$/, '.', md => {
    // Ignore anything under node_modules (paths are relative to '.').
    md = md.filter(s => s.indexOf('node_modules') !== 0);
    // NOTE(review): gatherIntel presumably populates the `articles`
    // collection read below — confirm against its definition.
    md.forEach(gatherIntel);

    // Newest first, preferring dateUpdated over the original date.
    var metas = Array.from(articles.values())
      .sort((a, b) => -((a.dateUpdated || a.date) - (b.dateUpdated || b.date)));
    // Split posts (under post/) from standalone pages.
    var postMetas = metas.filter(meta => meta.filepath.indexOf('post/') === 0);
    var nonpostMetas = metas.filter(meta => meta.filepath.indexOf('post/') !== 0);
    // Unique, sorted union of every post's tags.
    var allTags = Array.from(new Set(flatten(postMetas.map(meta => meta.tags)))).sort();

    // Posts get prev/next navigation: the list is newest-first, so
    // midx+1 is the older neighbour and midx-1 the newer one.
    postMetas.forEach((meta, midx) => {
      buildOneMarkdown(meta, postMetas[midx + 1], postMetas[midx - 1], postMetas);
    });
    // Standalone pages have no prev/next links.
    nonpostMetas.forEach( (meta, midx) => {
      buildOneMarkdown(meta, null, null, postMetas);
    });

    // Atom feed!
    // NOTE(review): postMetas[0] is read unguarded — an empty post list
    // would throw here; confirm at least one post always exists.
    var feed = new Feed({
      title : defaultMeta.title,
      description : defaultMeta.description,
      id : filepathToURL(''),
      link : filepathToURL(''),
      image : filepathToURL(defaultMeta.banner),
      copyright : 'Unless otherwise noted, released into the public domain under CC0.',
      updated : new Date(postMetas[0].dateUpdated || postMetas[0].date),
      author : {name : defaultMeta.author, link : filepathToURL(`#contact`)}
    });
    postMetas.forEach(post => {
      feed.addItem({
        title : post.title,
        id : filepathToURL(post.outfile),
        link : filepathToURL(post.outfile),
        description : post.description,
        author : [
          {name : post.author, link : `${filepathToURL('')}/#contact`},
        ],
        contributor : [],
        date : post.dateUpdated || post.date,
        image : post.socialBanner || post.banner
      });
    });
    allTags.forEach(tag => feed.addCategory(tag));
    fs.writeFileSync('atom.xml', feed.atom1());
  });
}
exports.find = function(pattern, directory, fileFilter) { var deferred = Q.defer() find .file(getFileFilter(fileFilter), directory, function(files) { Q.allSettled(getMatchedFiles(pattern, files)) .then(function (content) { deferred.resolve(getResults(content)); }) .done(); }) .error(function (err){ deferred.reject(err) }); return deferred.promise; };
// Gulp task: copy static assets into the build dir, then render the
// listing template with the base names of every non-index HTML file
// found at most one directory deep in the build output.
gulp.task('list', function () {
  gulp.src(dirs.source.copy).pipe(gulp.dest(dirs.build.html));

  find.file(/\.html$/, dirs.build.html, function (files) {
    // Keep files that are not index.html and contain at most one '/'.
    var names = files
      .filter(function (file) {
        var depth = (file.match(/\//g) || []).length;
        return file.indexOf('index.html') === -1 && depth <= 1;
      })
      .map(function (file) {
        return path.basename(file);
      });

    gulp.src(dirs.source.list)
      .pipe(jade({
        pretty: true,
        locals: { pages: names }
      }))
      .pipe(gulp.dest(dirs.build.html));
  });
});
// Load every file under ctx.cfg.contentDir into ctx.file, keyed by the
// path relative to the content dir. A missing directory is not an error.
// FIX 1: the "not a directory" message previously reported
//   ctx.cfg.filesDir even though the path checked is ctx.cfg.contentDir.
// FIX 2: the final eachSeries handler invoked callback(err) and then
//   fell through to callback() — the callback fired twice on error.
fs.stat(ctx.cfg.contentDir, function (errStat, stats) {
  if (errStat) {
    if (errStat.code === 'ENOENT') {
      // no error if it doesn't exist - just don't copy any files
      return process.nextTick(callback)
    }
    return callback(errStat)
  }

  // if this isn't a directory, then it's an error
  if (!stats.isDirectory()) {
    return callback(new Error('content directory ' + ctx.cfg.contentDir + ' is not a directory'))
  }

  find.file(ctx.cfg.contentDir, function (filenames) {
    // loop through each filename and read the contents
    async.eachSeries(
      filenames,
      function (filename, done) {
        // skip over backup files
        if (filename.match(/~$/)) {
          return done()
        }
        fs.readFile(filename, 'utf8', function (errReadFile, data) {
          if (errReadFile) return done(errReadFile)
          // key by path relative to the content dir
          var name = filename.substr(ctx.cfg.contentDir.length)
          ctx.file[name] = data
          done()
        })
      },
      function (err) {
        if (err) return callback(err)
        callback()
      }
    )
  }).error(callback)
})
// Static-site build: one task per index.jade found under inputDir, plus
// extra tasks for collection sub-pages; every task is rendered with jade
// and written beneath outputDir.
// NOTE(review): the pattern's unescaped '.' matches any character
// ('indexXjade' would match too) — confirm /index\.jade$/ was intended.
find.file(/index.jade$/, inputDir, (files) => {
  // One build task per template: source, destination, content.json data
  // (loaded eagerly via require) and per-page metadata.
  var tasks = files.map((tpl) => {
    var name = path.dirname(path.relative(inputDir, tpl))
    return {
      input: tpl,
      output: path.join(outputDir, name, 'index.html'),
      content: require(path.join(path.dirname(tpl), 'content.json')),
      meta: {
        name: name,
        relativePathToRoot: '..'
      }
    }
  })
  // shift output of home tasks
  // NOTE(review): with no semicolon after map(), ASI continues the
  // `var tasks =` statement through this .filter().forEach() chain,
  // whose result is undefined — verify `tasks` holds what the code
  // below expects.
  .filter((task) => task.meta.name === 'home')
  .forEach((task) => {
    task.output = path.join(outputDir, 'index.html')
    task.meta.relativePathToRoot = '.'
  })

  // add tasks for any collection routes
  tasks
    .forEach((task) => {
      var subPageMeta = packageSubPages[task.meta.name]
      if (!subPageMeta) return
      var collection = task.content[subPageMeta.field]
      // Add tasks to build each subpage to the queue
      // (forEach does not visit items pushed during iteration, so newly
      // added sub-page tasks are not themselves re-scanned here).
      collection.forEach((entry, ind) => {
        entry._slug = slug(entry[subPageMeta.slugFrom])
        entry._index = ind
        // Use a dedicated sub-page template when configured, otherwise
        // reuse the collection's own template.
        var input = subPageMeta.template
          ? path.join(task.input, '..', `${subPageMeta.template}.jade`)
          : task.input
        tasks.push({
          input: input,
          output: path.join(outputDir, task.meta.name, `${entry._slug}.html`),
          content: extend({}, task.content, { _entry: entry }),
          meta: {
            name: `${task.meta.name}/${entry._slug}`,
            relativePathToRoot: '..'
          }
        })
      })
    })

  // Render every task's template with its metadata, content, shared
  // facts and helper functions.
  tasks.forEach((task) => {
    var locals = {
      meta: task.meta,
      content: task.content,
      facts: require('../facts.json'),
      pretty: true
    }
    extend(locals, helpers)
    task.html = jade.renderFile(task.input, locals)
  })

  // Write all rendered pages, creating output directories as needed.
  async.each(tasks, (task, done) => {
    mkdirp(path.dirname(task.output), () => {
      fs.writeFile(task.output, task.html, {encoding: 'utf8'}, done)
    })
  }, (err) => {
    if (err) return console.error('build.js: ', err)
    console.log('Compiled %s templates in %sms', tasks.length, Date.now() - start)
  })
})
// Regenerates README.md: writes a title header, then appends every
// markdown snippet found under ../snippets.
// FIX: the previous async fs.readFile/fs.appendFile pairs inside forEach
// raced each other, so snippets were appended in completion order (the
// README content was nondeterministic) and every error was ignored; the
// callback-less fs.writeFile form is also deprecated. Synchronous calls
// make the output deterministic and surface failures.
var find = require('find');
var fs = require('fs');

var filename = process.cwd() + '/README.md';

// Header must be on disk before any snippet is appended.
fs.writeFileSync(
  filename,
  'React Snippets' + '\n=================================\n'
);

find.file(/\.md$/, __dirname + '/../snippets', function (files) {
  // Append each snippet in the order `find` returned it.
  files.forEach(function (file) {
    fs.appendFileSync(filename, fs.readFileSync(file));
  });
});
// First stages of an async.waterfall: locate every package.json under the
// project root, then hand the file list to the next stage.
// NOTE(review): fragment — this waterfall is cut off mid-second-stage;
// the remaining stages and final handler are outside this view.
async.waterfall(
  [
    function( cb ){
      // find.file's callback is success-only; forward the list with a
      // null error so the waterfall continues.
      find.file( "package.json", PROJECT_PATH, function( files ){
        return cb( null, files );
      } );
    },
    function( files, cb ){
// Matches hidden path segments: a '.' immediately after the start of the
// string or after a '/', e.g. '.git/config' or 'src/.cache/x'.
// Hoisted so the literal is not rebuilt for every file, and the useless
// /g flag is dropped (a global regex's stateful lastIndex is a foot-gun
// with RegExp#test).
const HIDDEN_PATH = /(^|\/)\.[^\/\.]/;

/**
 * Lists every file under `path`, excluding anything inside a hidden
 * (dot-prefixed) file or directory, and passes the result to `cb`.
 *
 * @param {string} path - directory to walk
 * @param {function(string[]): void} cb - receives the filtered paths
 */
function getFiles(path, cb) {
  find.file(path, (files) => {
    cb(files.filter((file) => !HIDDEN_PATH.test(file)));
  });
}
// Compile each page's index.jade with its sibling content.json into
// <outputDir>/<name>/index.html; the 'home' page is emitted at the root.
// FIX 1: the pattern was /\index.jade$/ — '\i' is just 'i' and the
//   unescaped '.' matched any character (e.g. 'indexXjade'); the dot is
//   now escaped so only real index.jade files match.
// FIX 2: the .filter().forEach() chain continued the `var tasks =`
//   expression under ASI (no semicolon break before '.'), which would
//   leave `tasks` undefined; it is now a separate statement so `tasks`
//   keeps the mapped array.
find.file(/index\.jade$/, inputDir, (files) => {
  // One build task per template: source, destination, the path of its
  // content.json (loaded lazily at render time) and page metadata.
  var tasks = files.map((tpl) => {
    var name = path.dirname(path.relative(inputDir, tpl))
    return {
      input: tpl,
      output: path.join(outputDir, name, 'index.html'),
      content: path.join(path.dirname(tpl), 'content.json'),
      meta: {
        name: name,
        relativePathToRoot: '..'
      }
    }
  })

  // shift output of home tasks to the site root
  tasks
    .filter((task) => task.meta.name === 'home')
    .forEach((task) => {
      task.output = path.join(outputDir, 'index.html')
      task.meta.relativePathToRoot = '.'
    })

  // Render every template with its metadata, content and shared facts.
  tasks.forEach((task) => {
    var locals = {
      meta: task.meta,
      content: require(task.content),
      md: md,
      facts: require('../facts.json'),
      pretty: true
    }
    task.html = jade.renderFile(task.input, locals)
  })

  // Write all rendered pages, creating output directories as needed.
  async.each(tasks, (task, done) => {
    mkdirp(path.dirname(task.output), () => {
      fs.writeFile(task.output, task.html, {encoding: 'utf8'}, done)
    })
  }, (err) => {
    if (err) return console.error('build.js: ', err)
    console.log('Compiled %s templates in %sms', tasks.length, Date.now() - start)
  })
})
// NOTE(review): fragment — the opening of convertToMD is outside this
// view. This tail swaps <code> tags for <var> (so turndown keeps them as
// inline markup), converts the HTML to Markdown, prepends a backlink to
// the HTML-formatted version, and writes docs/<sectionName>.md.
    .replace(/<code>/g, '<var>')
    .replace(/<\/code>/g, '</var>');

  let markdown = turndownService.turndown(withoutCodeTag),
    sectionName = path.basename(file, path.extname(file)),
    baseUrl = `https://huasofoundries.github.io/google-maps-documentation`,
    linkBefore = `[See html formatted version](${baseUrl}/${sectionName}.html) `,
    markdownFinal = linkBefore + markdown;

  // Promisified write; the caller is expected to await the returned
  // promise (fs.writeFileAsync presumably comes from bluebird-style
  // promisification — confirm at the top of the file).
  return fs.writeFileAsync(
    path.resolve(`${__dirname}/docs/${sectionName}.md`),
    markdownFinal,
    'utf-8'
  );
}

// Script entry point: convert every HTML doc under ./html sequentially.
if (require.main === module) {
  let t_ini = Date.now();
  // NOTE(review): /\.md/ is unanchored — it also matches names like
  // 'x.md.bak'; confirm whether /\.md$/ was intended for this scan of
  // the html directory.
  find.file(/\.md/, path.resolve(`${__dirname}/html`), async files => {
    for (let file of files) {
      debug(`converting ${chalk.green(path.basename(file))} to MarkDown`);
      await convertToMD(file);
    }
  });
}

module.exports = convertToMD;