// Loads form-data and request JSON files from `path` (default 'data'),
// reconstructs Request/FormData model objects, and invokes `callback`
// with a populated Context.
// Call as loadData(callback) or loadData(path, callback).
let loadData = (path, callback) => {
  // Support the single-argument form: loadData(callback).
  if (typeof path === 'function') {
    callback = path
    path = 'data'
  }
  Promise.all([
    fs.readdir(`${path}/form_data`),
    fs.readdir(`${path}/requests`),
  ]).then(([formDataFiles, requestFiles]) => {
    const formDataFilepaths = formDataFiles.map(x => `${path}/form_data/${x}`)
    const requestFilepaths = requestFiles.map(x => `${path}/requests/${x}`)
    console.log('Loading files...')
    // Return the inner promise so the single .catch() below sees every
    // failure (the original nested a second chain with its own .catch).
    return Promise.all([
      loadFiles(formDataFilepaths),
      loadFiles(requestFilepaths)
    ])
  }).then(([formDataContents, requestContents]) => {
    const formDataJSON = formDataContents.map(x => JSON.parse(x))
    const requestsJSON = requestContents.map(x => JSON.parse(x))
    const requests = requestsJSON.map(x => new Request(
      x.id, x.filename, x.time, x.url, x.pageName, x.ipAddress,
      x.headers, x.userId, x.previousRequestIds,
      x.geo.countryCode, x.geo.regionCode
    ))
    // Index requests by id so each FormData record can link to its request.
    const requestsDict = requests.reduce((o, x) => {
      o[x.id] = x
      return o
    }, {})
    const formData = formDataJSON.map(x => new FormData(x.post, requestsDict[x.requestId]))
    const context = new Context(formData, requests)
    callback(context)
    console.log('Invoked Callback')
  }).catch(error => console.log(error))
}
// Generator (co-style): recursively scans `dir` for markdown files and
// returns an array of { meta, file } records parsed from each .md file,
// skipping any filename present in `exclude`.
function * scanBlogMetas (dir, exclude) {
  let metas = []
  // read all markdown files
  let entries = yield fs.readdir(dir)
  // Yielding an array of generator objects lets the co runner process all
  // entries in parallel; results are pushed into the shared `metas` array,
  // so the final ordering need not match the directory listing order.
  yield entries.map(function * (entry) {
    if (!entry.match(/\.md$/) || exclude.indexOf(entry) >= 0) { return }
    let file = path.join(dir, entry)
    // NOTE(review): utils.fstat is used synchronously here — confirm it is
    // a sync wrapper and not promise-returning.
    let stat = utils.fstat(file)
    if (stat.isFile()) {
      let parsed = yield parseMarkdown(file)
      metas.push({ meta: parsed.meta, file: file })
    } else {
      // Non-files are treated as subdirectories: recurse and append.
      Array.prototype.push.apply(metas, yield scanBlogMetas(file, exclude))
    }
  })
  return metas
}
// Migration pass over all spirits, run in parallel: for each spirit that
// still has a JSON container config, backfill a missing settings.json and a
// missing YAML config; then do the same YAML backfill for each of the
// spirit's lives (subdirectories of spiritLives).
await Promise.all(spirits.map(async function(spirit){
  const settingsExists = await isFile(spiritSettingsJson(spirit));
  const yamlExists = await isFile(spiritContainerConfig(spirit));
  const jsonExists = await isFile(spiritContainerConfigJson(spirit));
  if(jsonExists){
    const json = await fs.readFile(spiritContainerConfigJson(spirit), 'utf8');
    const config = JSON.parse(json);
    if(!settingsExists){
      // Derive settings from the container config and persist them.
      const settings = createSettings(spirit, config);
      await fs.writeFile(spiritSettingsJson(spirit), JSON.stringify(settings, null, '  '));
    }
    if(!yamlExists){
      console.log('writing yaml for ', spirit);
      const yamlConfig = createYamlConfig(spirit, config);
      await yamlConfig.save();
    }
  }
  // Each life of the spirit is a directory under spiritLives(spirit).
  const files = await fs.readdir(spiritLives(spirit));
  const lives = await filterAsync(files, life => isDirectory(spiritLife(spirit, life)));
  await Promise.all(lives.map(async function(life){
    const yamlExists = await isFile(spiritLifeContainerConfig(spirit, life));
    const jsonExists = await isFile(spiritLifeContainerConfigJson(spirit, life));
    // Only lives that still have a JSON config but no YAML need converting.
    if(!yamlExists && jsonExists){
      const json = await fs.readFile(spiritLifeContainerConfigJson(spirit, life), 'utf8');
      const config = JSON.parse(json);
      const yamlConfig = createYamlConfig(spirit, config);
      await yamlConfig.saveLife(life);
    }
  }));
}));
// Reads UPLOAD_DIR, keeps regular files (skipping .DS_Store), ensures a
// thumbnail exists for each, and resolves with [{ filename, exif }] records.
export async function loadUploadedFiles () {
  const names = await fs.readdir(UPLOAD_DIR)
  // Stat every entry so directories can be told apart from files.
  const stats = await Promise.all(
    names.map((name) => fs.stat(`${UPLOAD_DIR}/${name}`))
  )
  // we filter (TODO cleaner way to exclude / include -- dotfiles ? mimetype ? extension ?)
  const filtered = names.filter((name, i) => stats[i].isFile() && name !== '.DS_Store')
  // Wait until every thumbnail has been generated.
  await Promise.all(
    filtered.map((name) => writeThumb(`${UPLOAD_DIR}/${name}`, `${THUMB_UPLOAD_DIR}/${name}`))
  )
  // Collect EXIF data for all files in parallel.
  const exifs = await Promise.all(
    filtered.map((name) => getExifPromise(`${UPLOAD_DIR}/${name}`))
  )
  return filtered.map((filename, i) => ({ filename, exif: exifs[i] }))
}
// Converts every file under markdown/ to HTML in parallel via the md2html
// generator; any failure in the batch falls through to the trailing .catch.
co(function *() {
  let files = yield fs.readdir('markdown');
  // NOTE(review): path.basename strips the '.md' extension, so md2html
  // receives 'markdown/<name>' without it — confirm md2html re-appends it.
  let tasks = files.map(function(file) {
    return co(md2html('markdown/' + path.basename(file, '.md')));
  });
  yield Promise.all(tasks);
}).catch(function(err) {
// Task 3: drop flags whose metadata file already exists in metaPath.
// BUG FIX: previously returned a plain array when metaPath was missing but
// a Promise otherwise; now always resolves to an array so the result is
// uniform in a promise pipeline.
function filterOutOldFlags(flags) {
  console.log('Task 3 of 5: Filtering out existing flags');
  // No meta directory yet means nothing has been downloaded: keep everything.
  if (!fs.existsSync(metaPath)) {
    return Promise.resolve(flags);
  }
  return fs.readdir(metaPath)
    .then(files => flags.filter(f => files.indexOf(f.meta) === -1));
}
// Task 5: drop flags whose image file already exists in imgPath.
// BUG FIX: previously returned a plain array when imgPath was missing but
// a Promise otherwise; now always resolves to an array (mirrors
// filterOutOldFlags).
function filterOutOldImages(flags) {
  // NOTE(review): the format string has no %d placeholder; util.format
  // appends flags.length after the message — confirm that is intended.
  console.log(util.format('Task 5 of 6: Filtering out existing images', flags.length));
  // No image directory yet: nothing has been downloaded, keep everything.
  if (!fs.existsSync(imgPath)) {
    return Promise.resolve(flags);
  }
  return fs.readdir(imgPath)
    .then(files => flags.filter(f => files.indexOf(f.img) === -1));
}
// Empties the release/ directory (creating it if absent) before packaging.
gulp.task('release-clean', async () => {
  const exists = await fs.exists('release');
  if (!exists) {
    await fs.mkdir('release');
  }
  const entries = await fs.readdir('release');
  // Dotfiles (e.g. .gitkeep) are deliberately left in place.
  for (const entry of entries) {
    if (/^\./.test(entry)) {
      continue;
    }
    await rm(path.resolve(__dirname, 'release', entry));
  }
});
// If dirPath is a directory, recurse into each of its entries; otherwise
// record the file path in the shared `paths` accumulator.
return isDirectory(dirPath).then(function (isDir) {
  if (isDir) {
    var searchOnDir = function searchOnDir(dir) {
      return search(path.join(dirPath, dir));
    };
    // NOTE(review): Q.map is not part of the core Q API — confirm a Q
    // extension providing it is loaded, otherwise this line throws at
    // runtime.
    return Q.all(Q.map(fsp.readdir(dirPath), searchOnDir));
  } else {
    paths.push(dirPath);
  };
});
// Resolves with the parsed contents of every .yaml typo map in typoMapsPath.
function getTypoMapsPromise () {
  return fsp
    .readdir(typoMapsPath)
    .then(names => {
      // Keep only *.yaml entries and turn them into absolute-ish paths.
      const yamlPaths = []
      for (const name of names) {
        if (/\.yaml$/.test(name)) {
          yamlPaths.push(path.join(typoMapsPath, name))
        }
      }
      return Promise.all(yamlPaths.map(yamlPath => fsp.readFile(yamlPath)))
    })
    .then(buffers => buffers.map(yaml.safeLoad))
}
// Recursively get JS
// Walks `dir` and resolves with the paths of all .js files, descending into
// subdirectories in parallel and skipping anything in excludedFiles.
async function getFiles(dir) {
  const entries = await fsp.readdir(dir);
  const resolved = await Promise.all(entries.map(async (entry) => {
    // Renamed from `path` to avoid shadowing the path module.
    const entryPath = `${dir}/${entry}`;
    const stat = await fsp.stat(entryPath);
    if (stat.isDirectory()) return getFiles(entryPath);
    if (excludedFiles.includes(entry)) return null;
    // BUG FIX: the dot must be escaped — /.js$/ also matched names like
    // "foojs" because `.` matches any character.
    if (!entry.match(/\.js$/)) return null;
    return entryPath;
  }));
  // Flatten the return (drops the null placeholders first).
  return flatten(...resolved.filter(v => v));
}
// Reads every file in fullPath (utf8) and resolves with a
// { filename: content } map.
async function loadDir(fullPath) {
  var contents = {};
  var files = await fs.readdir(fullPath);
  // Read all files in parallel, recording each under its bare name.
  await Promise.all(files.map(async function(file) {
    var filePath = Path.normalize(fullPath + '/' + file);
    contents[file] = await fs.readFile(filePath, {encoding:'utf8'});
  }));
  return contents;
}
// Recursively removes filepath (file or directory tree). A missing path is
// a no-op. Children of a directory are removed in parallel before the
// directory itself.
// BUG FIX: replaced the deprecated fs.exists() pre-check (a TOCTOU race —
// the path could vanish between the check and the stat) with a single stat
// whose ENOENT is treated as "already gone".
async function rm(filepath) {
  let stat;
  try {
    stat = await fs.stat(filepath);
  } catch (err) {
    if (err.code === 'ENOENT') return;  // already gone — nothing to do
    throw err;
  }
  if (stat.isDirectory()) {
    const entries = await fs.readdir(filepath);
    await Promise.all(
      entries.map(entry => rm(path.resolve(filepath, entry)))
    );
    await fs.rmdir(filepath);
  } else {
    await fs.unlink(filepath);
  }
}
// Loads every file in ./files into the module-level encFiles cache as
// { file, content } records and resolves with that cache. Subsequent calls
// resolve immediately from the cache.
function setupFiles() {
  if(encFiles.length) {
    return Promise.resolve(encFiles);
  }
  var filesDir = Path.join(__dirname, "files");
  return fs.readdir(filesDir).then(function(files) {
    return Promises.all(files.map(function(file){
      var iFile = Path.join(filesDir, file);
      return fs.readFile(iFile).then(function(content){
        encFiles.push({ file: file, content: content });
      });
    }));
  }).then(function() {
    // BUG FIX: the first call used to resolve with an array of undefined
    // values (the mapped promises resolve with nothing) while cached calls
    // resolved with encFiles; both paths now resolve with the cache.
    return encFiles;
  });
}
// Verifies pageLoader downloads the page, rewrites it to match the fixture,
// and stores sub-resources in the *_files directory.
// BUG FIXES: dropped the async+done mix (newer jest rejects it); removed the
// awaits inside the Promise.all array literal (they serialized everything
// and made the Promise.all pointless); `expect(fs.access(...)).toBeTruthy()`
// asserted a Promise object, which is always truthy — now awaited properly.
test('main html download checker', async () => {
  const tmpDir = fs.mkdtempSync(`${os.tmpdir()}${path.sep}`);
  const mainFile = path.join(tmpDir, 'localhost-testpath.html');
  const subFile = path.join(tmpDir, 'localhost-testpath_files', 'localhost-lessons.rss');
  // The download must finish before we inspect what was written.
  const result = await pageLoader('http://localhost/testpath', tmpDir);
  const [files, actualContent, expectedContent] = await Promise.all([
    fs.readdir(path.join(tmpDir, 'localhost-testpath_files')),
    fs.readFile(mainFile, 'utf8'),
    fs.readFile(path.join('__tests__', '__fixtures__', 'hexlet-io-courses_subst.html'), 'utf8'),
  ]);
  expect(actualContent).toBe(expectedContent);
  // fs.access rejects when the file is missing, so awaiting makes this real.
  await expect(fs.access(subFile)).resolves.toBeUndefined();
  expect(files.includes('localhost-lessons.rss')).toBeTruthy();
  expect(result).toBe('Download completed successfully');
});
// Reads flag metadata (*.json) from metaPath and processes every flag
// through a bounded-concurrency queue, tracking progress with a bar.
function getFlags() {
  var queue = new Queue(concurrencyLimit);
  return fs.readdir(metaPath).then(function(files){
    var flags = [];
    files.forEach(function(f) {
      if (f.endsWith('.json')) {
        flags.push(f);
      }
    });
    bar = new ProgressBar(':percent, ETA: :eta seconds', { total: flags.length });
    var queued = flags.map(function(flag) {
      return queue.add(() => processFlag(flag));
    });
    return Promise.all(queued);
  });
}
// Refreshes this cached directory listing. The in-flight promise is stored
// on `this.updating` so concurrent readers can share it; entries for files
// that disappeared are detached and dropped, new files are seeded with the
// `dummy` placeholder, and an fs.watch listener is (re)installed.
CachedDir.prototype.read = function() {
  var self = this
  // Assign and return in one expression: callers can await the same refresh.
  return this.updating = fs.readdir(this.path).then(function(files) {
    self.updating = null
    self.upToDate = true
    // Drop cache entries whose files no longer exist on disk.
    for (var file in self.cache)
      if (files.indexOf(file) == -1) {
        self.cache[file].detach()
        delete self.cache[file]
      }
    // Register newly-appeared files with the placeholder value.
    for (var i = 0; i < files.length; i++) {
      var file = files[i]
      if (!(file in self.cache))
        self.cache[file] = dummy
    }
    // Replace any previous watcher before installing a fresh one.
    if (self.watching) self.watching.close()
    self.watching = fs.watch(self.path, self.listen.bind(self))
    return self.cache
  })
}
// Reads every JSON file in ./jsondata and, for entries that contain a
// track, feeds each attribute child into _setDataAttr.
// NOTE(review): `resolve` is passed to _setDataAttr and never called here —
// confirm that helper is responsible for resolving once all files are done.
return new Promise(function(resolve, reject) {
  try{
    fsp.readdir('./jsondata')
      .then(function(files) {
        fileCount = 1;
        files.forEach(function(file) {
          fsp.readFile("./jsondata/" + file)
            .then(function(fileData) {
              var json = JSON.parse(fileData.toString());
              if (json.response.track !== undefined) {
                //loop through array of attributes
                for (var attrI = 0; attrI < data.children.length; attrI++) {
                  var currentChildObj = data.children[attrI],
                      currentChildName = currentChildObj.name;
                  _setDataAttr(currentChildObj, currentChildName, json, files, resolve);
                };
                fileCount += 1;
              } else {
                fileCount += 1;
              }
            }).catch(function(e) {
              console.log(e);
            });
        });
      });
  } catch(e){
    // NOTE(review): everything after `throw e` is unreachable — the
    // console.log, return, and reject(e) below never execute, so `reject`
    // is effectively unused in this Promise.
    throw e;
    console.log(e);
    return;
    reject(e);
  }
});
// Lists the word files in the repo (excluding the documentation files) and
// resolves with [{ filename, words }] where `words` is the file's
// non-empty lines.
const getFiles = async () => {
  const dir = (await fs.readdir(repoPath))
    .filter(file => !(['LICENSE', 'README.md', 'USERS.md'].includes(file)))
  const files = await Promise.all(
    dir.map(async filename => ({
      filename,
      // BUG FIX: the interpolation previously read `$(unknown)` — shell-style
      // syntax that never referenced the file being mapped, so every read hit
      // the literal path "<repoPath>/$(unknown)".
      words: await fs.readFile(`${repoPath}/${filename}`, 'utf8')
    }))
  )
  const toArray = files
    .map(file => ({
      ...file,
      // Split into lines and drop the empties.
      words: file.words.split('\n').filter(words => words.length)
    }))
  return toArray
}
// Converts each dkeg colour-scheme file to YAML syntax and writes the
// result under yaml/ with a .yml extension.
async function convertToYaml () {
  let basePath = path.join(__dirname + '/../../dkeg/')
  let commentReg = /(^!.*)/gm
  let defineReg = /^#define\s/gm
  let nameReg = /(^[a-zA-Z0-9]+)/gm
  // BUG FIX: the value class used A-z, which also matches [ \ ] ^ _ ` —
  // tightened to A-Z.
  let metaReg = /^!.*(Title|Author|Created):\s*([a-zA-Z0-9\s()]*$)/gm
  let colorReg = /(#[a-zA-Z0-9]{6})/g

  // Get the schemes
  let schemes
  try {
    schemes = await fs.readdir(basePath)
  } catch (error) {
    return console.log('Couldn\'t read the schemes:', error)
  }

  // Convert each scheme and save with yaml extension
  await Promise.all(schemes.map(async (scheme) => {
    console.log(scheme)
    let file
    try {
      file = await fs.readFile(basePath + scheme, 'utf8')
    } catch (err) {
      // BUG FIX: on a read failure `file` stayed undefined and the .replace
      // calls below crashed the whole batch; skip this scheme instead.
      console.log(err)
      return
    }
    // Turn comment into yaml comment
    let update = file.replace(metaReg, '$1 "$2"')
    update = update.replace(commentReg, '#$1')
    update = update.replace(defineReg, '')
    update = update.replace(nameReg, '$1:')
    update = update.replace(colorReg, '"$1"')
    try {
      await fs.writeFile(basePath + 'yaml/' + scheme + '.yml', update)
    } catch (err) {
      console.log('Could not write file:', err)
    }
  }))
}
// Locates the vagrant directory and logs the available nginx site configs
// (*.conf) by base name.
// NOTE(review): the meaning of find's 2000 argument (likely a timeout in
// ms) is defined elsewhere — confirm.
find(2000).then(function (vagrantDir) {
  // configuration directory
  var confDir = path.join(vagrantDir, 'config', 'nginx-config', 'sites');
  // promise a conf directory file listing
  var confPromise = fs.readdir(confDir);
  var isConfFile = /\.conf$/;
  // log the available sites
  confPromise.then(function (files) {
    // Leading '' produces a newline before the first site name.
    var listMsg = [''].concat(files.filter(function (file) {
      return isConfFile.test(file);
    }).map(function (file) {
      return path.basename(file, '.conf');
    })).join('\n');
    console.log(listMsg);
  });
}).catch(function (message) {
static async clearPreviousLog() { // 清理之前的运行日志 Logger.instance.info('[acgd] Start to clear previous logs ...'); let logPath = libPath.join(__dirname, '..', 'log'); let logExists = await libFsp.exists(logPath); if (!logExists) { return; } let files = await libFsp.readdir(logPath); for (let filename of files) { if (filename == 'placeholder' || filename == Logger.logName) { return; } let filePath = libPath.join(logPath, filename); await libFsp.unlink(filePath); Logger.instance.info('[acgd] Log deleted: %s', filePath); } }
// Recursively walks `dir`, logging the path of every regular file and
// skipping node_modules and .git. Resolves only after the whole tree has
// been visited; errors are logged rather than propagated.
async function recursiveRead(dir) {
  try {
    let files = await fs.readdir(dir)
    if (!files) return
    // BUG FIX: the original used files.forEach(async ...) — forEach
    // discards the returned promises, so the function resolved before any
    // stat or recursion finished and rejections inside the callback became
    // unhandled. Promise.all over map awaits everything.
    await Promise.all(files.map(async file => {
      if (file.match(/node_modules|\.git/)) return
      let entry = `${dir}/${file}`
      let stats = await fs.lstat(entry)
      if (stats.isFile()) {
        return console.log(entry)
      }
      await recursiveRead(entry)
    }))
  } catch (err) {
    console.log('error:', err)
  }
}
// Verifies writeTemplate substitutes placeholder values: pairs each *.tpl
// fixture with its expected-output sibling, runs writeTemplate with canned
// key/value pairs, and compares results. fs.writeFile is stubbed so no
// fixture is actually overwritten.
it('should substitute values', function(){
  var testTplDir = './test/fixtures-init/templates';
  var tests = {};
  sinon.stub(fs, 'writeFile').callsFake(function(fileName, content) {
    //console.log("stub writeFile", fileName, content)
    return Promise.resolve(content);
  });
  return fs.readdir(testTplDir).then(function(files) {
    return Promise.all(files.map(function(file){
      var fPath = Path.normalize(testTplDir+'/'+file);
      return fs.readFile(fPath, {encoding:'utf8'}).then(function(content) {
        // Fixture pairs share a base name: foo.tpl (input) vs foo.<ext> (output).
        var name = file.substr(0, file.length-4);
        if(! (name in tests)) { tests[name] = {}; }
        var f = tests[name];
        if(file.match(/(.tpl)$/)) {
          f.input = { file: fPath, data: content };
        } else {
          f.output = { file: fPath, data: content };
        }
      });
    }));
  }).then(function() {
    var keys = ['year', 'author', 'name', 'desc', 'cucardas'];
    var kvPairs = {};
    keys.forEach(function(key) {
      kvPairs[key] = 'valueOf'+ key.charAt(0).toUpperCase() + key.slice(1);
    });
    var testsArray = [];
    for(var t in tests) { testsArray.push(tests[t]); }
    return Promise.all(testsArray.map(function(test) {
      return qci.writeTemplate(test.input.file, test.output.file, kvPairs).then(function(out) {
        expect(out).to.eql(test.output.data);
        //console.log("out", out);
      });
    }));
  }).then(function() {
    return fs.writeFile.restore();
  }, function(err) {
    // BUG FIX: the stub was only restored on success, leaking into other
    // tests whenever an assertion failed (and the trailing no-argument
    // .catch() was a no-op). Restore on failure too, then rethrow so mocha
    // still reports the error.
    fs.writeFile.restore();
    throw err;
  });
});
// Resolves with every entry of sFromPath prefixed with the directory path.
fGetSubPaths = function( sFromPath ) {
  return fs.readdir( sFromPath ).then( function ( aEntries ) {
    var aFullPaths = [];
    for ( var i = 0; i < aEntries.length; i++ ) {
      aFullPaths.push( `${ sFromPath }/${ aEntries[ i ] }` );
    }
    return aFullPaths;
  } );
};
#!/usr/bin/env node var uglify = require('uglify-js'); var fs = require('fs-promise'); var pathUtils = require('path'); var mustache = require('mustache'); var browserify = require('browserify') var object = require('../object') var mapObject = require('../mapObject') var streamToString = require('stream-to-string') var PassThrough = require('stream').PassThrough var bytes = require('bytes') fs.readdir('.').then(list => { var jsfilenames = list.filter(item => item.match(/\.js$/) && !item.match(/index\.js$/)); return Promise.all(jsfilenames.map(filename => { return fs.readFile(filename, 'utf-8').then(js => { return { filename: filename, js: js }; }); })); }).then(files => { var sizes = {}; files.forEach(file => { var key = pathUtils.basename(file.filename, '.js'); var minjs = minSize(file.js) sizes[key] = minjs
// Loads the shared schema, then lints every bank definition against it.
// NOTE(review): error handling is deliberately asymmetric — a lint/readdir
// failure logs and exits with code 1, while a schema-load failure only logs
// via helper.error without exiting; confirm that is intended.
jsonfile.readFile(path.join(__dirname, 'schema.json')).then((schema) => {
  fs.readdir(path.join(__dirname, 'banks')).then(files => lint(files, schema)).catch((err) => {
    helper.error(err);
    process.exit(1);
  });
}).catch(helper.error);
"use strict" var Parser = require('parse-xl') var Promises = require('best-promise'); var fs = require('fs-promise'); fs.readdir('.').then(function(files) { return Promises.all(files.filter(function(file) { return file.match(/(\.xlsx?)$/); }).map(function(xls) { console.log('xls: ', xls); var sample = new Parser(xls); console.log(" ", JSON.stringify(sample,null,' ')); })); }).catch(function(err) { console.log("Error", err) });
return api.get('/modules/browserify-server-test-broken', {exceptions: false}).then(function (response) { expect(response.statusCode).to.equal(500); return fs.readdir(packagesDir).then(function (files) { expect(files).to.eql([]); }); });
// Asserts exactly one package was installed and resolves with the path to
// that package's node_modules directory.
function packageDirectory() {
  return fs.readdir(packagesDir).then(function (entries) {
    expect(entries.length).to.equal(1);
    var onlyPackage = entries[0];
    return packagesDir + '/' + onlyPackage + '/node_modules';
  });
}