// Machine-generated (regenerator) state machine; appears to be the compiled
// form of the ES6 generator:
//   try { return yield fs.stat.bind(null, filename); }
//   catch (err) {
//     if (err.code === 'ENOENT') throw new Error('file "...' ...);
//     throw err;
//   }
// NOTE(review): do not edit by hand — regenerate from the ES6 source.
return wrapGenerator(function($ctx0) {
  while (1) switch ($ctx0.prev = $ctx0.next) {
  case 0:
    // try-block entry: yield a thunk that stats `filename`.
    $ctx0.prev = 0;
    $ctx0.next = 3;
    return fs.stat.bind(null, filename);
  case 3:
    // Resumed with the stat result; return it to the caller.
    return $ctx0.abrupt("return", $ctx0.sent);
  case 6:
    // catch handler for the try beginning at state 0.
    $ctx0.prev = 6;
    $ctx0.t0 = $ctx0.catch(0);
    if (!($ctx0.t0.code === 'ENOENT')) {
      $ctx0.next = 10;
      break;
    }
    // ENOENT is translated into a friendlier error.
    throw new Error('file "' + filename + '" does not exist.');
  case 10:
    // Any other stat error is rethrown unchanged.
    throw $ctx0.t0;
  case 11:
  case "end":
    return $ctx0.stop();
  }
}, this, [[0, 6]]); // [[0, 6]]: try entry at state 0, catch at state 6
// Invalidating a single field drops only that field; invalidating the file
// drops the whole entry.
// FIX: register with `pit` (promise-aware `it`) so jest waits for the
// returned promise — with plain `it`, the assertions inside .then() would run
// after the test had already passed vacuously. This also matches every other
// promise-returning test in this suite.
pit('invalidates the cache per file or per-field', () => {
  // Every stat succeeds; mtime value itself is irrelevant here.
  fs.stat.mockImpl((file, callback) =>
    callback(null, {
      mtime: {
        getTime: () => {},
      },
    })
  );

  var cache = new Cache({
    cacheKey: 'cache',
  });
  var loaderCb = jest.genMockFn().mockImpl(() =>
    Promise.resolve('banana')
  );
  var file = '/rootDir/someFile';

  return cache.get(file, 'field', loaderCb).then(() => {
    expect(cache.has(file)).toBe(true);
    // Per-field invalidation keeps the file entry alive.
    cache.invalidate(file, 'field');
    expect(cache.has(file)).toBe(true);
    expect(cache.has(file, 'field')).toBe(false);
    // File-level invalidation removes the entry entirely.
    cache.invalidate(file);
    expect(cache.has(file)).toBe(false);
  });
});
// A second lookup of the same file/field must be served from the cache
// without invoking the loader again.
pit('caches the value after the first call', () => {
  fs.stat.mockImpl((file, callback) => {
    callback(null, {
      mtime: {
        getTime: () => {},
      },
    });
  });

  var theCache = new Cache({
    cacheKey: 'cache',
  });
  var initialLoader = jest.genMockFn().mockImpl(() =>
    Promise.resolve('lol')
  );

  return theCache
    .get('/rootDir/someFile', 'field', initialLoader)
    .then(() => {
      var shouldNotBeCalled = jest.genMockFn();
      return theCache
        .get('/rootDir/someFile', 'field', shouldNotBeCalled)
        .then(value => {
          // The cached value is returned and the second loader never runs.
          expect(shouldNotBeCalled).not.toBeCalled();
          expect(value).toBe('lol');
        });
    });
});
// Distinct fields on the same file are cached independently; each miss
// invokes the loader, which here yields an incrementing counter.
pit('supports storing multiple fields', () => {
  fs.stat.mockImpl((file, callback) => {
    callback(null, {
      mtime: {
        getTime: () => {},
      },
    });
  });

  var cacheUnderTest = new Cache({
    cacheKey: 'cache',
  });
  var callCount = 0;
  var countingLoader = jest.genMockFn().mockImpl(() =>
    Promise.resolve(callCount++)
  );

  return cacheUnderTest
    .get('/rootDir/someFile', 'field1', countingLoader)
    .then(firstValue => {
      expect(firstValue).toBe(0);
      // A different field on the same file misses and loads again.
      return cacheUnderTest
        .get('/rootDir/someFile', 'field2', countingLoader)
        .then(secondValue => expect(secondValue).toBe(1));
    });
});
// Persisting the cache happens on a timer, so flush timers and assert
// that the serialized cache was handed to fs.writeFile.
it('should write cache to disk', () => {
  var statCall = 0;
  var modificationTimes = [10, 20, 30];
  fs.stat.mockImpl((file, callback) =>
    callback(null, {
      mtime: {
        getTime: () => modificationTimes[statCall++],
      },
    })
  );

  var cache = new Cache({
    cacheKey: 'cache',
  });
  cache.get('/rootDir/bar', 'field', () =>
    Promise.resolve('bar value')
  );
  cache.get('/rootDir/foo', 'field', () =>
    Promise.resolve('foo value')
  );
  cache.get('/rootDir/baz', 'field', () =>
    Promise.resolve('baz value')
  );

  // jest has some trouble with promises and timeouts within promises :(
  jest.runAllTimers();
  jest.runAllTimers();
  expect(fs.writeFile).toBeCalled();
});
// A file whose recorded mtime changed since the cache was persisted must be
// reloaded; an unchanged file is served from the persisted cache ('oh hai').
pit('should not load outdated cache', () => {
  fs.stat.mockImpl((file, callback) =>
    callback(null, {
      mtime: {
        getTime: () => {},
      },
    })
  );
  // Bump the recorded mtime for /rootDir/foo so its persisted entry goes
  // stale. NOTE(review): `fileStats` is a fixture shared with the fs mock,
  // defined elsewhere in this test file.
  fileStats['/rootDir/foo'].mtime.getTime = () => 123;

  var cache = new Cache({
    cacheKey: 'cache',
  });
  var freshLoader = jest.genMockFn().mockImpl(() =>
    Promise.resolve('new value')
  );

  return cache
    .get('/rootDir/someFile', 'field', freshLoader)
    .then(value => {
      // Unchanged file: served from the persisted cache, loader untouched.
      expect(freshLoader).not.toBeCalled();
      expect(value).toBe('oh hai');
      return cache.get('/rootDir/foo', 'field', freshLoader).then(val => {
        // Stale file: loader runs and its value wins.
        expect(freshLoader).toBeCalled();
        expect(val).toBe('new value');
      });
    });
});
// When the mtime changes between gets, fetching a new field clears any old
// fields, so re-fetching field1 afterwards invokes the loader again.
pit('clears old field when getting new field and mtime changed', () => {
  var tick = 0;
  fs.stat.mockImpl((file, callback) => {
    callback(null, {
      mtime: {
        // Every stat reports a newer mtime than the last.
        getTime: () => tick++,
      },
    });
  });

  var cache = new Cache({
    cacheKey: 'cache',
  });
  var loader = jest.genMockFn().mockImpl(() =>
    Promise.resolve('lol' + tick)
  );

  // Flattened chain: each step only runs after the previous get resolved,
  // and only the final value is asserted — identical to nesting the gets.
  return cache
    .get('/rootDir/someFile', 'field1', loader)
    .then(() => cache.get('/rootDir/someFile', 'field2', loader))
    .then(() => cache.get('/rootDir/someFile', 'field1', loader))
    .then(finalValue => expect(finalValue).toBe('lol2'));
});
exports.exists = function* (filename) { try { return yield fs.stat.bind(null, filename); } catch (err) { if (err.code === 'ENOENT') throw new Error('file "' + filename + '" does not exist.'); throw err; } }
// Process every entry of the folder in parallel: stat it, then either record
// plain files on context.fileList or recurse into subdirectories. The outer
// `next` fires once all entries have been handled.
// NOTE(review): fragment of a larger `_walkFolder`-style function; fileNames,
// dirPath, dirName, context, generator and the outer `next` come from the
// enclosing scope.
async.forEach(fileNames, function(fileName, next) {
  //log.silly("generate#_walkFolder()", "fileName:", fileName, "dirPath:", dirPath);
  var filePath = path.join(dirPath, fileName);
  async.waterfall([
    // Stage 1: stat the entry (node-style callback feeds stage 2).
    fs.stat.bind(null, filePath),
    // Stage 2: dispatch on the stat result.
    function(stat, next) {
      var name = generator.normalizePath(path.join(dirName, fileName));
      if (stat.isFile()) {
        context.fileList.push({name: name, path: filePath});
        // Defer the callback — presumably to break up deep synchronous
        // recursion; confirm against the async library's expectations.
        setImmediate(next);
      } else {
        // Recurse into the subdirectory; _walkFolder invokes next when done.
        _walkFolder(context, name, filePath, next);
      }
    }
  ], next);
}, next);
// The value cache.get resolves with is whatever the loader's promise yields.
pit('gets the value from the loader callback', () => {
  fs.stat.mockImpl((file, callback) =>
    callback(null, {
      mtime: {
        getTime: () => {},
      },
    })
  );

  var cache = new Cache({
    cacheKey: 'cache',
  });
  var produceValue = jest.genMockFn().mockImpl(() =>
    Promise.resolve('lol')
  );

  return cache
    .get('/rootDir/someFile', 'field', produceValue)
    .then(resolved => {
      expect(resolved).toBe('lol');
    });
});
// A cache miss must invoke the loader with exactly the requested file path.
pit('calls loader callback for uncached file', () => {
  fs.stat.mockImpl((file, callback) => {
    callback(null, {
      mtime: {
        getTime: () => {},
      },
    });
  });

  var cache = new Cache({
    cacheKey: 'cache',
  });
  var loader = jest.genMockFn().mockImpl(() => Promise.resolve());

  return cache.get('/rootDir/someFile', 'field', loader).then(() => {
    expect(loader).toBeCalledWith('/rootDir/someFile');
  });
});
// Machine-generated (regenerator) state machine; appears to be the compiled
// form of:
//   filename = join(folder, 'component.json');
//   try { yield fs.stat.bind(null, filename); } catch (err) { return false; }
//   return true;
// i.e. "does folder contain a component.json?".
// NOTE(review): do not edit by hand — regenerate from the ES6 source.
return wrapGenerator(function($ctx0) {
  while (1) switch ($ctx0.prev = $ctx0.next) {
  case 0:
    filename = join(folder, 'component.json');
    // try-block entry: yield a thunk that stats the manifest.
    $ctx0.prev = 1;
    $ctx0.next = 4;
    return fs.stat.bind(null, filename);
  case 4:
    // stat succeeded — skip the catch handler.
    $ctx0.next = 9;
    break;
  case 6:
    // catch handler: any stat failure means "does not exist".
    $ctx0.prev = 6;
    $ctx0.t0 = $ctx0.catch(1);
    return $ctx0.abrupt("return", false);
  case 9:
    return $ctx0.abrupt("return", true);
  case 10:
  case "end":
    return $ctx0.stop();
  }
}, this, [[1, 6]]); // [[1, 6]]: try entry at state 1, catch at state 6
/**
 * Main function which ends up calling readdirRec and reads all files and
 * directories in given root recursively.
 * @param { Object }   opts      Options to specify root (start directory), filters and recursion depth
 * @param { function } callback1 When callback2 is given calls back for each processed file - function (fileInfo) { ... },
 *                               when callback2 is not given, it behaves like explained in callback2
 * @param { function } callback2 Calls back once all files have been processed with an array of errors and file infos
 *                               function (err, fileInfos) { ... }
 * @returns the stream when running in streaming mode (no callbacks given),
 *          otherwise undefined
 */
function readdir(opts, callback1, callback2) {
  var stream
    , handleError
    , handleFatalError
    // NOTE(review): `pending` appears unused in this function.
    , pending = 0
    , errors = []
    , readdirResult = {
        directories: []
      , files: []
      }
    , fileProcessed
    , allProcessed
    , realRoot
    , aborted = false
    ;

  // If no callbacks were given we will use a streaming interface
  if (isUndefined(callback1)) {
    var api = require('./stream-api')();
    stream = api.stream;
    callback1 = api.processEntry;
    callback2 = api.done;
    handleError = api.handleError;
    handleFatalError = api.handleFatalError;

    // When the consumer closes the stream, stop all further traversal.
    stream.on('close', function () { aborted = true; });
  } else {
    // Callback mode: non-fatal errors accumulate; a fatal error finishes
    // immediately with a null result.
    handleError = function (err) { errors.push(err); };
    handleFatalError = function (err) {
      handleError(err);
      allProcessed(errors, null);
    };
  }

  if (isUndefined(opts)){
    handleFatalError(new Error (
      'Need to pass at least one argument: opts! \n' +
      'https://github.com/thlorenz/readdirp#options'
      )
    );
    return stream;
  }

  // Fill in option defaults.
  opts.root = opts.root || '.';
  opts.fileFilter = opts.fileFilter || function() { return true; };
  opts.directoryFilter = opts.directoryFilter || function() { return true; };
  opts.depth = typeof opts.depth === 'undefined' ? 999999999 : opts.depth;
  opts.entryType = opts.entryType || 'files';

  // lstat reports on symlinks themselves; stat follows them.
  var statfn = opts.lstat === true ? fs.lstat.bind(fs) : fs.stat.bind(fs);

  // With a single callback it receives the final summary; with two, the
  // first is per-entry and the second is the summary.
  if (isUndefined(callback2)) {
    fileProcessed = function() { };
    allProcessed = callback1;
  } else {
    fileProcessed = callback1;
    allProcessed = callback2;
  }

  // Turns a filter spec (function, glob string, or array of globs) into a
  // predicate over entryInfo, or undefined when no filter was given.
  function normalizeFilter (filter) {

    if (isUndefined(filter)) return undefined;

    // True when ALL globs are negated ('!...'); throws when negated and
    // plain globs are mixed, since that combination is ambiguous.
    function isNegated (filters) {

      function negated(f) {
        return f.indexOf('!') === 0;
      }

      var some = filters.some(negated);
      if (!some) {
        return false;
      } else {
        if (filters.every(negated)) {
          return true;
        } else {
          // if we detect illegal filters, bail out immediately
          throw new Error(
            'Cannot mix negated with non negated glob filters: ' + filters + '\n' +
            'https://github.com/thlorenz/readdirp#filters'
          );
        }
      }
    }

    // Turn all filters into a function
    if (isFunction(filter)) {
      return filter;
    } else if (isString(filter)) {
      return function (entryInfo) {
        return minimatch(entryInfo.name, filter.trim());
      };
    } else if (filter && Array.isArray(filter)) {
      if (filter) filter = filter.map(function (f) {
        return f.trim();
      });
      return isNegated(filter) ?
        // use AND to concat multiple negated filters
        function (entryInfo) {
          return filter.every(function (f) {
            return minimatch(entryInfo.name, f);
          });
        }
        :
        // use OR to concat multiple inclusive filters
        function (entryInfo) {
          return filter.some(function (f) {
            return minimatch(entryInfo.name, f);
          });
        };
    }
  }

  // Stats every entry of currentDir and calls callProcessed with the
  // collected entryInfos (entries whose stat failed are skipped, the error
  // is recorded via handleError).
  function processDir(currentDir, entries, callProcessed) {
    if (aborted) return;
    var total = entries.length
      , processed = 0
      , entryInfos = []
      ;

    fs.realpath(currentDir, function(err, realCurrentDir) {
      if (aborted) return;
      if (err) {
        handleError(err);
        callProcessed(entryInfos);
        return;
      }

      var relDir = path.relative(realRoot, realCurrentDir);

      if (entries.length === 0) {
        callProcessed([]);
      } else {
        entries.forEach(function (entry) {

          var fullPath = path.join(realCurrentDir, entry)
            , relPath = path.join(relDir, entry);

          statfn(fullPath, function (err, stat) {
            if (err) {
              handleError(err);
            } else {
              entryInfos.push({
                  name          : entry
                , path          : relPath        // relative to root
                , fullPath      : fullPath
                , parentDir     : relDir         // relative to root
                , fullParentDir : realCurrentDir
                , stat          : stat
              });
            }
            processed++;
            // Last stat callback flushes the batch.
            if (processed === total) callProcessed(entryInfos);
          });
        });
      }
    });
  }

  // Walks currentDir recursively up to opts.depth, emitting matching
  // entries via fileProcessed and accumulating them in readdirResult.
  function readdirRec(currentDir, depth, callCurrentDirProcessed) {
    if (aborted) return;

    fs.readdir(currentDir, function (err, entries) {
      if (err) {
        handleError(err);
        callCurrentDirProcessed();
        return;
      }

      processDir(currentDir, entries, function(entryInfos) {

        var subdirs = entryInfos
          .filter(function (ei) { return ei.stat.isDirectory() && opts.directoryFilter(ei); });

        subdirs.forEach(function (di) {
          if(opts.entryType === 'directories' || opts.entryType === 'both' || opts.entryType === 'all') {
            fileProcessed(di);
          }
          readdirResult.directories.push(di);
        });

        entryInfos
          .filter(function(ei) {
            // 'all' accepts every non-directory; otherwise only regular
            // files and symlinks count as files.
            var isCorrectType = opts.entryType === 'all' ?
              !ei.stat.isDirectory() : ei.stat.isFile() || ei.stat.isSymbolicLink();
            return isCorrectType && opts.fileFilter(ei);
          })
          .forEach(function (fi) {
            if(opts.entryType === 'files' || opts.entryType === 'both' || opts.entryType === 'all') {
              fileProcessed(fi);
            }
            readdirResult.files.push(fi);
          });

        var pendingSubdirs = subdirs.length;

        // Be done if no more subfolders exist or we reached the maximum desired depth
        if(pendingSubdirs === 0 || depth === opts.depth) {
          callCurrentDirProcessed();
        } else {
          // recurse into subdirs, keeping track of which ones are done
          // and call back once all are processed
          subdirs.forEach(function (subdir) {
            readdirRec(subdir.fullPath, depth + 1, function () {
              pendingSubdirs = pendingSubdirs - 1;
              if(pendingSubdirs === 0) {
                callCurrentDirProcessed();
              }
            });
          });
        }
      });
    });
  }

  // Validate and normalize filters
  try {
    opts.fileFilter = normalizeFilter(opts.fileFilter);
    opts.directoryFilter = normalizeFilter(opts.directoryFilter);
  } catch (err) {
    // if we detect illegal filters, bail out immediately
    handleFatalError(err);
    return stream;
  }

  // If filters were valid get on with the show
  fs.realpath(opts.root, function(err, res) {
    if (err) {
      handleFatalError(err);
      return stream;
    }

    realRoot = res;
    readdirRec(opts.root, 0, function () {
      // All errors are collected into the errors array
      if (errors.length > 0) {
        allProcessed(errors, readdirResult);
      } else {
        allProcessed(null, readdirResult);
      }
    });
  });

  return stream;
}
/*
  MIT License http://www.opensource.org/licenses/mit-license.php
  Author Tobias Koppers @sokra
*/
var fs = require("graceful-fs");

// Asynchronous (callback-based) input filesystem backed by graceful-fs.
function NodeJsInputFileSystem() {}

module.exports = NodeJsInputFileSystem;

// This filesystem always resolves results via callbacks, never synchronously.
NodeJsInputFileSystem.prototype.isSync = function() {
  return false;
};

// Delegate the read operations straight to graceful-fs, bound to the fs
// module so they keep working when passed around detached.
["stat", "readdir", "readFile"].forEach(function(method) {
  NodeJsInputFileSystem.prototype[method] = fs[method].bind(fs);
});