function(result, callback) { fs.readFile(srcPath, 'utf8', callback); },
fileList.forEach(function (file) {
  var module = file
    .replace(clientLibPath, '')
    .replace(this.outermostPath, '')
    .replace('.js', '');

  if (module == 'jmsclient') {
    fs.readFile(clientLibPath + 'jmsclient.js', 'utf8', function (err, jmsclient) {
      fs.readFile(clientLibPath + 'almond.js', 'utf8', function (err, almond) {
        log.verbose('load', 'jmsclient');
        var streamData = {
          module: 'jmsclient',
          mtime: new Date(),
          path: 'jmsclient',
          source: almond + jmsclient
        };
        this.push(JSON.stringify(streamData), 'utf8');
        listLength -= 1;
        if (listLength === 0) { this.push(null); }
      }.bind(this));
    }.bind(this));
    return;
  }

  fs.stat(file, function (err, stat) {
    fs.readFile(file, 'utf8', function (err, data) {
      if (err) {
        // TODO log.error('moduleloader readFile error');
        return;
      }
      var streamData = {
        sourceId: this.source,
        stage: this.stage,
        module: module,
        mtime: new Date(stat.mtime),
        path: file,
        source: data
      };
      log.verbose('load', module);
      this.push(JSON.stringify(streamData), 'utf8');
      listLength -= 1;
      if (listLength === 0) { this.push(null); }
    }.bind(this));
  }.bind(this));
}.bind(this));
// if the -S|--save option is specified, then write installed packages
// as dependencies to a package.json file.
// This is experimental.
function save (where, installed, tree, pretty, cb) {
  if (!npm.config.get("save") &&
      !npm.config.get("save-dev") &&
      !npm.config.get("save-optional") ||
      npm.config.get("global")) {
    return cb(null, installed, tree, pretty)
  }

  var saveBundle = npm.config.get('save-bundle')

  // each item in the tree is a top-level thing that should be saved
  // to the package.json file.
  // The relevant tree shape is { <folder>: {what:<pkg>} }
  var saveTarget = path.resolve(where, "package.json")
    , things = Object.keys(tree).map(function (k) {
        // if "what" was a url, then save that instead.
        var t = tree[k]
          , u = url.parse(t.from)
          , w = t.what.split("@")
        if (u && u.protocol) w[1] = t.from
        return w
      }).reduce(function (set, k) {
        var rangeDescriptor = semver.valid(k[1]) && semver.gte(k[1], "0.1.0")
                            ? "~" : ""
        set[k[0]] = rangeDescriptor + k[1]
        return set
      }, {})

  // don't use readJson, because we don't want to do all the other
  // tricky npm-specific stuff that's in there.
  fs.readFile(saveTarget, function (er, data) {
    // ignore errors here, just don't save it.
    try {
      data = JSON.parse(data.toString("utf8"))
    } catch (ex) {
      er = ex
    }
    if (er) {
      return cb(null, installed, tree, pretty)
    }

    var deps = npm.config.get("save-optional") ? "optionalDependencies"
             : npm.config.get("save-dev") ? "devDependencies"
             : "dependencies"

    if (saveBundle) {
      var bundle = data.bundleDependencies || data.bundledDependencies
      delete data.bundledDependencies
      if (!Array.isArray(bundle)) bundle = []
      data.bundleDependencies = bundle
    }

    log.verbose('saving', things)
    data[deps] = data[deps] || {}
    Object.keys(things).forEach(function (t) {
      data[deps][t] = things[t]
      if (saveBundle) {
        var i = bundle.indexOf(t)
        if (i === -1) bundle.push(t)
      }
    })

    data = JSON.stringify(data, null, 2) + "\n"
    fs.writeFile(saveTarget, data, function (er) {
      cb(er, installed, tree, pretty)
    })
  })
}
function savePackageJson (tree, next) {
  validate('OF', arguments)
  var saveBundle = npm.config.get('save-bundle')

  // each item in the tree is a top-level thing that should be saved
  // to the package.json file.
  // The relevant tree shape is { <folder>: {what:<pkg>} }
  var saveTarget = path.resolve(tree.path, 'package.json')
  // don't use readJson, because we don't want to do all the other
  // tricky npm-specific stuff that's in there.
  fs.readFile(saveTarget, 'utf8', iferr(next, function (packagejson) {
    const indent = detectIndent(packagejson).indent
    const newline = detectNewline(packagejson)
    try {
      tree.package = parseJSON(packagejson)
    } catch (ex) {
      return next(ex)
    }

    // If we're saving bundled deps, normalize the key before we start
    if (saveBundle) {
      var bundle = tree.package.bundleDependencies || tree.package.bundledDependencies
      delete tree.package.bundledDependencies
      if (!Array.isArray(bundle)) bundle = []
    }

    var toSave = getThingsToSave(tree)
    var toRemove = getThingsToRemove(tree)
    var savingTo = {}
    toSave.forEach(function (pkg) { if (pkg.save) savingTo[pkg.save] = true })
    toRemove.forEach(function (pkg) { if (pkg.save) savingTo[pkg.save] = true })
    Object.keys(savingTo).forEach(function (save) {
      if (!tree.package[save]) tree.package[save] = {}
    })

    log.verbose('saving', toSave)
    const types = ['dependencies', 'devDependencies', 'optionalDependencies']
    toSave.forEach(function (pkg) {
      if (pkg.save) tree.package[pkg.save][pkg.name] = pkg.spec
      const movedFrom = []
      for (let saveType of types) {
        if (
          pkg.save !== saveType &&
          tree.package[saveType] &&
          tree.package[saveType][pkg.name]
        ) {
          movedFrom.push(saveType)
          delete tree.package[saveType][pkg.name]
        }
      }
      if (movedFrom.length) {
        log.notice('save', `${pkg.name} is being moved from ${movedFrom.join(' and ')} to ${pkg.save}`)
      }
      if (saveBundle) {
        var ii = bundle.indexOf(pkg.name)
        if (ii === -1) bundle.push(pkg.name)
      }
    })

    toRemove.forEach(function (pkg) {
      if (pkg.save) delete tree.package[pkg.save][pkg.name]
      if (saveBundle) {
        bundle = without(bundle, pkg.name)
      }
    })

    Object.keys(savingTo).forEach(function (key) {
      tree.package[key] = deepSortObject(tree.package[key])
    })

    if (saveBundle) {
      tree.package.bundleDependencies = deepSortObject(bundle)
    }

    var json = stringifyPackage(tree.package, indent, newline)
    if (json === packagejson) {
      log.verbose('shrinkwrap', 'skipping write for package.json because there were no changes.')
      next()
    } else {
      writeFileAtomic(saveTarget, json, next)
    }
  }))
}
StaticFile.prototype.readServer = function (filepath, callback) {
  filepath = path.resolve(this.path, './' + filepath);
  fs.readFile(filepath, 'utf8', callback);
};
return new Promise((resolve, reject) => {
  gfs.readFile(file, (err, contents) => {
    err ? reject(err) : resolve(contents)
  })
})
function readJson_ (file, log, strict, cb) {
  fs.readFile(file, "utf8", function (er, d) {
    parseJson(file, er, d, log, strict, cb)
  })
}
self.batchSession.on("request.clientFile", function (file, reply) {
  // TODO Basedir checks.
  fs.readFile(self.basedir + "/" + file, reply);
});
function savePackageJson (args, tree, next) {
  validate('AOF', arguments)
  var saveBundle = npm.config.get('save-bundle')

  // each item in the tree is a top-level thing that should be saved
  // to the package.json file.
  // The relevant tree shape is { <folder>: {what:<pkg>} }
  var saveTarget = path.resolve(tree.path, 'package.json')
  // don't use readJson, because we don't want to do all the other
  // tricky npm-specific stuff that's in there.
  fs.readFile(saveTarget, iferr(next, function (packagejson) {
    try {
      packagejson = parseJSON(packagejson)
    } catch (ex) {
      return next(ex)
    }

    // If we're saving bundled deps, normalize the key before we start
    if (saveBundle) {
      var bundle = packagejson.bundleDependencies || packagejson.bundledDependencies
      delete packagejson.bundledDependencies
      if (!Array.isArray(bundle)) bundle = []
    }

    var toSave = getThingsToSave(tree)
    var toRemove = getThingsToRemove(args, tree)
    var savingTo = {}
    toSave.forEach(function (pkg) { savingTo[pkg.save] = true })
    toRemove.forEach(function (pkg) { savingTo[pkg.save] = true })
    Object.keys(savingTo).forEach(function (save) {
      if (!packagejson[save]) packagejson[save] = {}
    })

    log.verbose('saving', toSave)
    toSave.forEach(function (pkg) {
      packagejson[pkg.save][pkg.name] = pkg.spec
      if (saveBundle) {
        var ii = bundle.indexOf(pkg.name)
        if (ii === -1) bundle.push(pkg.name)
      }
    })

    toRemove.forEach(function (pkg) {
      delete packagejson[pkg.save][pkg.name]
      if (saveBundle) {
        bundle = without(bundle, pkg.name)
      }
    })

    Object.keys(savingTo).forEach(function (key) {
      packagejson[key] = deepSortObject(packagejson[key])
    })

    if (saveBundle) {
      packagejson.bundledDependencies = deepSortObject(bundle)
    }

    var json = JSON.stringify(packagejson, null, 2) + '\n'
    writeFileAtomic(saveTarget, json, next)
  }))
}
exec("iconv -f gbk -t utf-8 -o " + htmlUTF8File + " " + htmlFile, function(err, stdout, stderr) { if (err) { error_handler(err, true); return; } fs.readFile(htmlUTF8File, function(err, data) { if (err) { error_handler(err, true); return; } if (!data) { error_handler('WrongHtml', true); return; } // err (wrong vc) retry mech if (data.toString().indexOf('验证码输入错误') != -1) { error_handler('WrongVC', true); return; } // err (wrong dyzh) retry mech if (data.toString().indexOf('无此导游信息') != -1) { error_handler('DYZHNotExist'); return; } // try parse html $ = cheerio.load(data.toString()); var tds = $("td"); if (tds.length < 40) { error_handler('WrongFormat', true); return; } var headPic = root + $("table.table_border_01 td table td img").attr("src").substring(1); var fields = { '姓名': 11, '导游证号': 17, '性别': 19, '资格证号': 21, '等级': 23, '导游卡号': 25, '学历': 27, '身份证号': 29, '语种': 31, '区域名称': 33, '民族': 35, '发证日期': 37, '分值': 39, '获惩日期': 41, '获惩类型': 43, '旅行社': 45, '电话': 47, '其它信息': 48 } var result = {'照片': headPic}; _.each(fields, function(id, field) { result[field] = tds.eq(id).text().trim(); }); callback(null, result); }); });
function readJson (jsonFile, opts, cb) {
  if (typeof cb !== "function") cb = opts, opts = {}
  if (cache.hasOwnProperty(jsonFile)) {
    log.verbose(jsonFile, "from cache")
    return cb(null, cache[jsonFile])
  }
  opts.file = jsonFile
  if (!opts.tag) {
    var parsedPath = jsonFile.indexOf(npm.dir) === 0 && jsonFile.match(
      /\/([^\/]+)\/([^\/]+)\/package\/package\.json$/)
    if (parsedPath && semver.valid(parsedPath[2])) {
      // this is a package.json in some installed package.
      // infer the opts.tag so that linked packages behave right.
      opts.tag = parsedPath[2]
    }
  }

  var wscript = null
    , contributors = null
    , serverjs = null

  if (opts.wscript != null) {
    wscript = opts.wscript
    next()
  } else fs.readFile(path.join(path.dirname(jsonFile), "wscript"), function (er, data) {
    if (er) opts.wscript = false
    else opts.wscript = !!(data.toString().match(/(^|\n)def build\b/) &&
                           data.toString().match(/(^|\n)def configure\b/))
    wscript = opts.wscript
    next()
  })

  if (opts.contributors != null) {
    contributors = opts.contributors
    next()
  } else fs.readFile(path.join(path.dirname(jsonFile), "AUTHORS"), function (er, data) {
    if (er) opts.contributors = false
    else {
      data = data.toString().split(/\r?\n/).map(function (l) {
        l = l.trim().split("#").shift()
        return l
      }).filter(function (l) { return l })
      opts.contributors = data
    }
    contributors = opts.contributors
    next()
  })

  if (opts.serverjs != null) {
    serverjs = opts.serverjs
    next()
  } else fs.stat(path.join(path.dirname(jsonFile), "server.js"), function (er, st) {
    if (er) opts.serverjs = false
    else opts.serverjs = st.isFile()
    serverjs = opts.serverjs
    next()
  })

  function next () {
    if (wscript === null || contributors === null || serverjs === null) {
      return
    }

    fs.readFile(jsonFile, processJson(opts, function (er, data) {
      if (er) return cb(er)
      var doLoad = !(jsonFile.indexOf(npm.cache) === 0 &&
                     path.basename(path.dirname(jsonFile)) !== "package")
      if (!doLoad) return cb(er, data)
      loadPackageDefaults(data, path.dirname(jsonFile), cb)
    }))
  }
}
function (next) { fs.readFile(String(filepath), (enc || 'utf8'), next); }, function (contents, next) {
Array.forEach( files, function( file ) {
  var output = outputMap[ file ];

  // Read input file
  var filePath = this.getAbsolutePath( file );
  fs.readFile( filePath, 'utf-8', function( err, data ) {
    if ( err ) {
      this.emit( 'error', { message: 'Parse: Could not open file "' + file + '"' });
    } else {
      // Add parsing file count
      this.parsePending++;
      this.emit( 'parseStart', file, output );

      // Parse less file and generate less tree
      new less.Parser({
        paths: this.options.include,
        optimization: this.options.optimization,
        filename: file
      }).parse( data, function ( err, tree ) {
        if ( err ) {
          this.emit( 'parseError', file, output );
          less.writeError( err, { color: true } );
          this.parsePending--;
        } else {
          var css, parseError = false;

          // Process less tree to css
          try {
            css = tree.toCSS({ compress: this.options.compress });
          } catch ( e ) {
            this.emit( 'parseError', file, output );
            less.writeError( e, { color: true } );
            this.parsePending--;
            parseError = true;
          }

          // Write the output file
          if ( !parseError ) {
            mkdirp.sync(path.dirname(output));
            fs.writeFile( output, css, "utf8", function( err ) {
              if ( err ) {
                this.emit( 'error', { message: 'Could not write file "' + output + '"' } );
              } else {
                pending.erase( file );
                this.emit( 'parseComplete', file, output, pending.length === 0 );
              }

              if ( --this.parsePending === 0 ) {
                // All files parsed
                process.nextTick( function() {
                  this.emit( 'parseCompleteAll', outputMap, options );
                }.bind(this) );
              }
            }.bind(this) );
          }
        }
      }.bind(this) ); // end // Parser
    }
  }.bind(this) ); // end // Read input file
}.bind(this) );
var pjson = await new Promise((resolve, reject) => fs.readFile(file, (err, source) => err ? reject(err) : resolve(source.toString())));
function readme_(file, data, rm, cb) {
  fs.readFile(rm, "utf8", function (er, rm) {
    data.readme = rm
    return cb(er, data)
  })
}
function (next) { fs.readFile(String(filepath), file.encoding(options), next); }, function (contents, next) {
module.exports = function (main, compress) {
  var Erowid = {
    config: {
      input: '/erowid.json',
      output: '/sample.json',
      gz: '/erowid.json.gz',
      compress: compress || false
    },
    data: []
  };

  // Data input handler
  Erowid.in = function (str, fn) {
    console.log('> output: ' + Erowid.config.output);
    if (Erowid.config.compress) console.log('> compress: ' + Erowid.config.gz);
    console.log();

    async.series([
      function (seriesCb) {
        async.eachSeries(JSON.parse(str), function (obj, eachCb) {
          if (obj.id < 100 || Erowid.config.compress) {
            Erowid.data.push(fn(obj) || obj);
          }
          eachCb(null);
        }, function (err) {
          seriesCb(err);
        });
      },
      function (cb) {
        var str = JSON.stringify(Erowid.data.slice(0, 100), null, 2);
        var both = false;

        fs.writeFile(process.cwd() + Erowid.config.output, str, function (err) {
          if (both || !Erowid.config.compress) {
            cb(err);
          } else {
            both = true;
          }
        });

        if (Erowid.config.compress) {
          fs.writeFile(process.cwd() + Erowid.config.input, JSON.stringify(Erowid.data), function (err) {
            if (err) throw err;
          });
          zlib.gzip(JSON.stringify(Erowid.data), function (error, result) {
            if (error) throw error;
            fs.writeFile(process.cwd() + Erowid.config.gz, result, function (err) {
              if (both) {
                cb(err);
              } else {
                both = true;
              }
            });
          });
        }
      }
    ]);
  };

  console.log('> input: ' + Erowid.config.input);
  fs.readFile(process.cwd() + Erowid.config.input, function (inputError, data) {
    if (!inputError) {
      Erowid.in(data, main);
    } else {
      throw "No file at " + process.cwd() + Erowid.config.input;
    }
  });
};
function (next) { fs.readFile(filepath, next); },
return new Promise(resolve => {
  gfs.readFile(file, (err, contents) => {
    resolve(err ? null : contents)
  })
})
map(output.map.sources, function (name, i, next) {
  fs.readFile(name, 'utf8', next)
}, function (err, sourcesContent) {
return new Observable(observer => {
  const argLen = args.length;

  if (argLen === 0) {
    const error = new RangeError(`${ARG_LENGTH_ERROR}, but got no arguments.`);
    error.code = 'ERR_MISSING_ARGS';

    throw error;
  }

  if (argLen !== 1 && argLen !== 2) {
    throw new RangeError(`${ARG_LENGTH_ERROR}, but got ${argLen} arguments.`);
  }

  const [globPattern] = args;
  const readFileOptions = {};
  let options = args[1];

  if (argLen !== 1) {
    if (typeof options === 'string') {
      readFileOptions.encoding = options;
      options = {};
    } else if (options !== null && typeof options === 'object') {
      if (options.nodir) {
        throw new TypeError(`read-glob doesn't support \`nodir\` option as it ignores directory entries by default, but a value ${
          inspectWithKind(options.nodir)
        } was provided for it.`);
      }

      if (options.mark) {
        throw new TypeError(`read-glob doesn't support \`mark\` option as it only emits file data and there is no need to differentiate file paths and directory paths explicitly, but a value ${
          inspectWithKind(options.mark)
        } was provided for it.`);
      }

      for (const readFileOption of READFILE_OPTIONS) {
        if (options[readFileOption] !== undefined) {
          readFileOptions.encoding = options.encoding;
        }
      }
    }
  }

  assertFsReadFileOption(readFileOptions);

  let rest = 0;

  function completeIfNeeded() {
    rest--;

    if (!subscription.closed) { // eslint-disable-line no-use-before-define
      return;
    }

    if (rest !== 0) {
      return;
    }

    observer.complete();
  }

  const subscription = globObservable(globPattern, options).subscribe({
    next(value) {
      rest++;

      if (value.stat && value.stat.isDirectory()) {
        completeIfNeeded();
        return;
      }

      readFile(resolve(value.cwd, value.path), readFileOptions, (err, contents) => {
        if (err) {
          if (err.code !== 'EISDIR') {
            observer.error(err);
            return;
          }
        } else {
          value.contents = contents;
          observer.next(value);
        }

        completeIfNeeded();
      });
    },
    error(err) {
      observer.error(err);
    },
    complete() {
      if (rest !== 0) {
        return;
      }

      observer.complete();
    }
  });

  return function abortReadGlob() {
    subscription.unsubscribe();
  };
});
return function (done) { fs.readFile(filename, 'utf8', done) }
} // end FUNCTION mkdir()

/**
* FUNCTION: stream( path, index, clbk )
* Takes a directory hash and calculates transforms across all hash datasets. Calculations are performed according to transform functions.
*
* @param {string} path - source parent data directory
* @param {object} index - directory hash
* @param {function} clbk - (optional) callback to invoke after finishing all streams. Function should take one input argument: [ error ]. If no errors, error is null.
*/
function stream( dirpath, index, clbk ) {
  var dirs,
      numDirs,
      files,
      numFiles,
      filepath,
      onRead,
      onFinish,
      counter = 0;

  // Get the directories:
  dirs = Object.keys( index );
  numDirs = dirs.length;

  for ( var i = 0; i < numDirs; i++ ) {
    // Get the directory files:
    files = index[ dirs[ i ] ];
    numFiles = files.length;

    onFinish = onEnd( dirs[ i ], i+1, numDirs, done );
    onRead = onData( path.join( DEST, dirs[ i ] ), numFiles, onFinish );

    for ( var j = 0; j < numFiles; j++ ) {
      // Get the file path:
      filepath = path.join( dirpath, dirs[ i ], files[ j ] );

      // Load the data file:
      fs.readFile( filepath, 'utf8', onRead( j ) );
    } // end FOR j
  } // end FOR i

  return;

  /**
  * FUNCTION: onData( path, total, clbk )
  * Wraps parameters in an enclosure and returns a function to enclose a file index.
  *
  * @param {string} path - output directory path
  * @param {number} total - total file number
  * @param {function} clbk - callback to invoke after calculating summary statistics
  * @returns {function} function to enclose a file index
  */
  function onData( path, total, clbk ) {
    var DATA = new Array( total ),
        counter = 0;

    /**
    * FUNCTION: onData( idx )
    * Returns a callback invoked upon reading a data file. Enclosing the index ensures the output data array is in the same order as the input files.
    *
    * @param {number} idx - file index
    * @returns {function} callback to invoke after reading a data file.
    */
    return function onData( idx ) {
      /**
      * FUNCTION: onData()
      * Callback to invoke after reading a data file.
      *
      * @param {object} error - error object
      * @param {string} data - data as a string
      */
      return function onData( error, data ) {
        if ( error ) {
          console.error( error.stack );
          throw new Error( 'stream()::unable to read file.' );
        }
        // Append the data to our data buffer:
        DATA[ idx ] = JSON.parse( data );

        if ( ++counter === total ) {
          // Send off the data to calculate transforms:
          streams( DATA, path, clbk );
        }
      }; // end FUNCTION onData()
    }; // end FUNCTION onData()
  } // end FUNCTION onData()

  /**
  * FUNCTION: done()
  *
  */
  function done() {
    if ( ++counter === numDirs ) {
      clbk();
    }
  } // end FUNCTION done()
} // end FUNCTION stream()
asyncMap(files, function (file, cb) {
  fs.readFile(file, 'utf8', function (er, data) {
    res[file] = data
    return cb(er)
  })
}, function (er) {
Q.allSettled(_.map(files, function (file) {
  var dashboardDefer = Q.defer();

  // If the file doesn't end in .json, don't process it
  if (file.indexOf('.json') === -1) {
    return Q.resolve({ ignored: true });
  }

  if (blacklistRegexp.test(file)) {
    return Q.resolve({ ignored: true });
  }

  fs.readFile(file, 'utf8', function (err, dashboardData) {
    if (err) {
      if (err.code === 'EISDIR') {
        return;
      } else {
        throw err;
      }
    }

    var validateModule = function (module) {
      var moduleDefer = Q.defer();
      var schema;

      try {
        schema = require('../schema/modules/' + module['module-type']);
      } catch (e) {
        schema = require('../schema/module');
      }

      var result = v.validate(module, schema);

      if (result.errors.length > 0) {
        result.errors.forEach(function (err) {
          err.filename = file;
          err.module = module.slug + ' - ' + module['module-type'];
        });
        moduleDefer.reject(result.errors);
      } else {
        if (module['module-type'] === 'tab') {
          return Q.all(_.map(module.tabs, validateModule));
        } else {
          moduleDefer.resolve();
        }
      }

      return moduleDefer.promise;
    };

    dashboardData = JSON.parse(dashboardData);

    if (dashboardData['page-type'] !== 'dashboard') {
      return dashboardDefer.resolve({ ignored: true });
    } else if (!dashboardData['published'] && !argh.unpublished) {
      return dashboardDefer.resolve({ ignored: true });
    }

    var result = v.validate(dashboardData, dashboardSchema);

    if (result.errors.length > 0) {
      result.errors.forEach(function (err) {
        err.filename = file;
      });
      dashboardDefer.reject(result.errors);
    } else {
      return Q.all(_.map(dashboardData.modules, validateModule))
        .then(dashboardDefer.resolve, dashboardDefer.reject);
    }
  });

  return dashboardDefer.promise.then(function (a) {
    if (a.ignored === undefined) console.log(('✔ ' + file).green);
    return a;
  }, function (e) {
    console.log(('✘ ' + file).red);
    throw e;
  });
})).then(defer.resolve);
fs.readFile(google_botkey, 'utf8', (err, data) => {
  var err_flag = 0;
  if (err) {
    console.log('read botkey error:' + err);
    writeLog('read botkey error:' + err, 'error', 'append', 0);
    err_flag = 1;
  } else {
    if (data != '') {
      console.log('Has ori setting!');
      var read_err_flag = 0, err_msg = '';
      try {
        var ori_setting = JSON.parse(data);
      } catch (e) {
        err_msg = e;
        read_err_flag = 1;
      } finally {
        if (read_err_flag == 1) {
          writeLog('[getkey-get ori setting] error:' + err_msg, 'error', 'append', 1);
        } else {
          botkey = ori_setting['data']['bot_manager']['botkey'];
          id_serverip = ori_setting['data']['bot_manager']['setting']['id_serverip'];
          id_serverport = ori_setting['data']['bot_manager']['setting']['id_serverport'];
          id_server_name = ori_setting['data']['bot_manager']['setting']['seed_server_name'];
          id_server_version = ori_setting['data']['bot_manager']['setting']['seed_server_version'];
          limit_retry = ori_setting['data']['bot_manager']['setting']['limit_retry'];
          timeout_retryTime = ori_setting['data']['bot_manager']['setting']['timeout_retryTime'];
          if (typeof id_serverip === 'undefined') {
            console.log(google_botkey + ' formal error!!!');
            process.exit(0);
          } else {
            console.log('==botkey exists==:' + botkey);
          }
        }
      }
    } else {
      console.log('Need to require a new setting from bot_manager!');
      err_flag = 1;
    }
  }

  // Request a new botkey, because no usable botkey is currently recorded locally
  if (err_flag == 1) {
    getkey((stat, err_msg) => {
      if (stat == 'false') {
        console.log('[getkey] err:' + err_msg);
        writeLog('[getkey] err:' + err_msg, 'error', 'append', 1);
      } else if (stat == 'ok') {
        console.log('==get new botkey==:' + botkey);
        ReadTWaddress(tw_address_filename, function () {
          var now = new Date();
          console.log('[' + now + '] getTerms start');
          getTerms(term_requireNum);
        });
      }
    });
  } else {
    ReadTWaddress(tw_address_filename, function () {
      var now = new Date();
      console.log('[' + now + '] getTerms start');
      writeLog('[' + now + '] getTerms start', 'process', 'append', 0);
      getTerms(term_requireNum);
    });
  }
});
function getArtFromPath(path, options, cb) {
  fs.readFile(path, (err, data) => {
    if(err) {
      return cb(err);
    }

    //
    // Convert from encodedAs -> j
    //
    const ext = paths.extname(path).toLowerCase();
    const encoding = options.encodedAs || defaultEncodingFromExtension(ext);

    // :TODO: how are BOM's currently handled if present? Are they removed? Do we need to?

    function sliceOfData() {
      if(options.fullFile === true) {
        return iconv.decode(data, encoding);
      } else {
        const eofMarker = defaultEofFromExtension(ext);
        return iconv.decode(eofMarker ? sliceAtEOF(data, eofMarker) : data, encoding);
      }
    }

    function getResult(sauce) {
      const result = {
        data     : sliceOfData(),
        fromPath : path,
      };
      if(sauce) {
        result.sauce = sauce;
      }
      return result;
    }

    if(options.readSauce === true) {
      sauce.readSAUCE(data, (err, sauce) => {
        if(err) {
          return cb(null, getResult());
        }

        //
        // If an encoding was not provided & we have a mapping from
        // the information provided by SAUCE, use that.
        //
        if(!options.encodedAs) {
          /*
          if(sauce.Character && sauce.Character.fontName) {
            var enc = SAUCE_FONT_TO_ENCODING_HINT[sauce.Character.fontName];
            if(enc) {
              encoding = enc;
            }
          }
          */
        }
        return cb(null, getResult(sauce));
      });
    } else {
      return cb(null, getResult());
    }
  });
}
return function(file, done) {
  var thisFileIsBinary = isBinary[path.extname(file.originalPath)];
  var preprocessors = [];
  var nextPreprocessor = function(error, content) {
    // normalize B-C
    if (arguments.length === 1 && typeof error === 'string') {
      content = error;
      error = null;
    }

    if (error) {
      file.content = null;
      file.contentPath = null;
      return done(error);
    }

    if (!preprocessors.length) {
      file.contentPath = null;
      file.content = content;
      return done();
    }

    preprocessors.shift()(content, file, nextPreprocessor);
  };
  var instantiatePreprocessor = function(name) {
    if (alreadyDisplayedWarnings[name]) {
      return;
    }

    try {
      preprocessors.push(injector.get('preprocessor:' + name));
    } catch (e) {
      if (e.message.indexOf('No provider for "preprocessor:' + name + '"') !== -1) {
        log.warn('Can not load "%s", it is not registered!\n ' +
          'Perhaps you are missing some plugin?', name);
      } else {
        log.warn('Can not load "%s"!\n ' + e.stack, name);
      }

      alreadyDisplayedWarnings[name] = true;
    }
  };

  // collects matching preprocessors
  // TODO(vojta): should we cache this ?
  for (var i = 0; i < patterns.length; i++) {
    if (mm(file.originalPath, patterns[i])) {
      if (thisFileIsBinary) {
        log.warn('Ignoring preprocessing (%s) %s because it is a binary file.',
          config[patterns[i]].join(', '), file.originalPath);
      } else {
        config[patterns[i]].forEach(instantiatePreprocessor);
      }
    }
  }

  return fs.readFile(file.originalPath, function(err, buffer) {
    if (err) {
      throw err;
    }

    file.sha = sha1(buffer);
    nextPreprocessor(null, thisFileIsBinary ? buffer : buffer.toString());
  });
};
readJson(jsonPath, log.warn, function (er, data) {
  if (er && er.code === "ENOENT") er.code = "ENOPACKAGEJSON"
  if (er) return cb(er)

  if (opts && opts.dev) {
    if (!data.dependencies) data.dependencies = {}
    Object.keys(data.devDependencies || {}).forEach(function (k) {
      if (data.dependencies[k]) {
        log.warn("package.json", "Dependency '%s' exists in both dependencies " +
                 "and devDependencies, using '%s@%s' from dependencies",
                 k, k, data.dependencies[k])
      } else {
        data.dependencies[k] = data.devDependencies[k]
      }
    })
  }

  if (!npm.config.get("optional") && data.optionalDependencies) {
    Object.keys(data.optionalDependencies).forEach(function (d) {
      delete data.dependencies[d]
    })
  }

  // User has opted out of shrinkwraps entirely
  if (npm.config.get("shrinkwrap") === false) return cb(null, data, null)

  if (wrap) {
    log.verbose("readDependencies: using existing wrap", [where, wrap])
    var rv = {}
    Object.keys(data).forEach(function (key) {
      rv[key] = data[key]
    })
    rv.dependencies = {}
    Object.keys(wrap).forEach(function (key) {
      log.verbose("from wrap", [key, wrap[key]])
      rv.dependencies[key] = readWrap(wrap[key])
    })
    log.verbose("readDependencies returned deps", rv.dependencies)
    return cb(null, rv, wrap)
  }

  var wrapfile = path.resolve(where, "npm-shrinkwrap.json")

  fs.readFile(wrapfile, "utf8", function (er, wrapjson) {
    if (er) return cb(null, data, null)

    log.verbose("readDependencies", "npm-shrinkwrap.json is overriding dependencies")
    var newwrap
    try {
      newwrap = JSON.parse(wrapjson)
    } catch (ex) {
      return cb(ex)
    }

    log.info("shrinkwrap", "file %j", wrapfile)
    var rv = {}
    Object.keys(data).forEach(function (key) {
      rv[key] = data[key]
    })
    rv.dependencies = {}
    Object.keys(newwrap.dependencies || {}).forEach(function (key) {
      rv.dependencies[key] = readWrap(newwrap.dependencies[key])
    })

    // fold in devDependencies if not already present, at top level
    if (opts && opts.dev) {
      Object.keys(data.devDependencies || {}).forEach(function (k) {
        rv.dependencies[k] = rv.dependencies[k] || data.devDependencies[k]
      })
    }

    log.verbose("readDependencies returned deps", rv.dependencies)
    return cb(null, rv, newwrap.dependencies)
  })
})
fs.stat(tempFile, function (err, stats) {
  if (err) {
    return cb(new gutil.PluginError('gulp-jsmin', err));
  }
  options = options || {};
  fs.readFile(tempFile, {encoding: 'UTF-8'}, function (err, data) {
    if (err) {
      return cb(new gutil.PluginError('gulp-jsmin', err));
    }
    console.log(file);
    gutil.log('gulp-jsmin:', gutil.colors.green('✔ ') + file.relative);
    cb(null, file);
  });
});
});
};