_createInBandTestRun(testPaths, watcher, onResult, onFailure) {
  const mutex = throat(1);
  return testPaths.reduce(
    (promise, path) =>
      mutex(() =>
        promise
          .then(() => {
            if (watcher.isInterrupted()) {
              throw new CancelRun();
            }
            this._dispatcher.onTestStart(this._config, path);
            return runTest(path, this._config, this._hasteContext.resolver);
          })
          .then(result => onResult(path, result))
          .catch(err => onFailure(path, err)),
      ),
    Promise.resolve(),
  );
}
module.exports = (markdown, progress) => {
  const checkLinkStatus = throat(2, url => rp({uri: url}));
  var report = {linksChecked: 0, errors: []};
  const matchCount = markdown.match(regex).length;
  progress(matchCount);

  const replacer = (match, url) => {
    report.linksChecked++;
    return checkLinkStatus(url)
      .then(d => {
        progress();
        return match;
      })
      .catch(err => {
        progress();
        if (err.statusCode == 404 || (err.cause && err.cause.code === 'ENOTFOUND')) {
          report.errors.push({status: '404', url: url + ' [REMOVED]'});
          return '';
        } else {
          report.errors.push({status: err.statusCode, url: url + ' [Not removed from list]'});
          return match;
        }
      });
  };

  return stringReplaceAsync(markdown, regex, replacer)
    .then(result => {
      return {content: result, report: createReport(report)};
    });
};
export function bmp2png (file) {
  return throat(() => new Promise((resolve, reject) => {
    let convert = spawn('convert', [realpathSync(file.path), 'png:-'])
    convert.stdin.end()
    convert.stderr.on('data', x => process.stderr.write(x))
    let data = new Promise((resolve, reject) => {
      convert.stdout.pipe(endpoint((err, buffer) => {
        if (err) {
          console.error('Error reading converted data!')
          reject(err)
        } else {
          resolve(buffer)
        }
      }))
    })
    convert.on('close', (code) => {
      if (code === 0) {
        resolve(data.then(buffer => file.derive(
          basename(file.name, extname(file.name)) + '.png', buffer)))
      } else {
        console.error('Unable to convert BMP file to PNG: ' + code)
        reject(new Error('convert exited: ' + code))
      }
    })
  }))
}
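// Note on the call above: `throat(fn)` takes no concurrency count, which only
// works if `throat` here is already a bound limiter. A minimal sketch of the
// module-level setup this snippet appears to assume (the count of 4 is a guess):
//
//   const throat = require('throat')(4) // at most 4 concurrent `convert` runs
//
// With `throat(n)` you get a limiter called as `limit(fn, ...args)`; with
// `throat(n, fn)` you instead get a single pre-wrapped, rate-limited fn.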
async _createInBandTestRun(
  tests: Array<Test>,
  watcher: TestWatcher,
  onStart: OnTestStart,
  onResult: OnTestSuccess,
  onFailure: OnTestFailure,
) {
  const mutex = throat(1);
  return tests.reduce(
    (promise, test) =>
      mutex(() =>
        promise
          .then(async () => {
            if (watcher.isInterrupted()) {
              throw new CancelRun();
            }
            await onStart(test);
            return runTest(
              test.path,
              this._globalConfig,
              test.context.config,
              test.context.resolver,
            );
          })
          .then(result => onResult(test, result))
          .catch(err => onFailure(test, err)),
      ),
    Promise.resolve(),
  );
}
function getVersions(functions) {
  return Promise.all(functions.map(throat(1, (fn) => {
    const params = {
      FunctionName: fn,
    };
    return lambda.listVersionsByFunction(params).promise();
  })));
}
function backupFunctions(functions) {
  return Promise.all(functions.map(throat(1, (fn) => {
    console.log(`Starting backup of function ${fn}`);
    const params = {
      FunctionName: fn,
      Description: 'Node 0.10 Deprecation Blueprint Backup',
    };
    return lambda.publishVersion(params).promise();
  })));
}
function upgradeFunctions(functions) {
  return Promise.all(functions.map(throat(1, (fn) => {
    console.log(`Starting runtime upgrade of function ${fn}`);
    const params = {
      FunctionName: fn,
      Runtime: targetRuntime,
    };
    return lambda.updateFunctionConfiguration(params).promise();
  })));
}
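// The three Lambda helpers above share one shape: `functions.map(throat(1, fn))`.
// `throat(1, fn)` returns a wrapper that queues every call, so `map` fires all
// calls immediately but the AWS SDK requests execute strictly one at a time.
// A self-contained sketch of that behavior (the `delay` helper is hypothetical):

const throat = require('throat');

const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

const task = throat(1, async name => {
  console.log('start', name);
  await delay(100); // stands in for the SDK call
  console.log('done', name);
});

// All three calls are queued up front but run sequentially.
Promise.all(['a', 'b', 'c'].map(task));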
constructor({
  extensions,
  fastfs,
  moduleCache,
  preferNativePlatform,
  helpers,
  platforms,
}) {
  super();
  this._extensions = extensions;
  this._fastfs = fastfs;
  this._moduleCache = moduleCache;
  this._preferNativePlatform = preferNativePlatform;
  this._helpers = helpers;
  this._platforms = platforms;
  this._processHastePackage = throat(1, this._processHastePackage.bind(this));
  this._processHasteModule = throat(1, this._processHasteModule.bind(this));
}
_createParallelTestRun(testPaths, watcher, onResult, onFailure) {
  const config = this._config;
  const farm = workerFarm({
    autoStart: true,
    maxConcurrentCallsPerWorker: 1,
    maxConcurrentWorkers: this._options.maxWorkers,
    maxRetries: 2,
  }, TEST_WORKER_PATH);
  const mutex = throat(this._options.maxWorkers);

  // Send test suites to workers continuously instead of all at once to track
  // the start time of individual tests.
  const runTestInWorker = ({path, config}) => mutex(() => {
    if (watcher.isInterrupted()) {
      return Promise.reject();
    }
    this._dispatcher.onTestStart(config, path);
    return promisify(farm)({config, path});
  });

  const onError = (err, path) => {
    onFailure(path, err);
    if (err.type === 'ProcessTerminatedError') {
      console.error(
        'A worker process has quit unexpectedly! ' +
        'Most likely this is an initialization error.');
      process.exit(1);
    }
  };

  const onInterrupt = new Promise((_, reject) => {
    watcher.on('change', state => {
      if (state.interrupted) {
        reject(new CancelRun());
      }
    });
  });

  const runAllTests = Promise.all(testPaths.map(path => {
    return runTestInWorker({config, path})
      .then(testResult => onResult(path, testResult))
      .catch(error => onError(error, path));
  }));

  return Promise.race([runAllTests, onInterrupt])
    .then(() => workerFarm.end(farm));
}
return plugin._getUploadFiles(distDir, filePattern).then(function(files) {
  var uploader = function(f) {
    return plugin._uploadFile(sentrySettings, distDir, f);
  };
  return Promise.all(files.map(throat(5, uploader))).then(function() {
    return plugin._getReleaseFiles(sentrySettings);
  }).then(function(response) {
    plugin.log('Files known to sentry for this release', { verbose: true });
    for (var i = 0; i < response.length; i++) {
      plugin.log('✔ ' + response[i].name, { verbose: true });
    }
  });
});
gulp.task('download-wallpapers', async function() {
  const client = new S3Photos();
  const photos = await client.list();

  // ensure we have the wallpapers dir
  await mkdirp(__dirname + '/wallpapers');

  const downloadPromises = photos.map(throat(10, async function(photo) {
    const localPath = __dirname + '/wallpapers/' + photo;
    const time = timer();
    await client.download(photo, localPath);
    gutil.log(`Downloaded ${photo}`, `${time()} ms`);
  }));
  await Promise.all(downloadPromises);
});
}).then(function (list) {
  var apiLimitExceeded = false;
  var results = list.map(throat(10, function (el) {
    var deferred = Q.defer();
    var re = /github\.com\/([\w\-\.]+)\/([\w\-\.]+)/i;
    var parsedUrl = re.exec(el.url.replace(/\.git$/, ''));

    // only return components from github
    if (!parsedUrl) {
      deferred.resolve();
      return deferred.promise;
    }

    var user = parsedUrl[1];
    var repo = parsedUrl[2];
    var apiUrl = 'https://api.github.com/repos/' + user + '/' + repo;

    request.get(apiUrl, {
      json: true,
      qs: {
        client_id: process.env.GITHUB_CLIENT_ID,
        client_secret: process.env.GITHUB_CLIENT_SECRET
      },
      headers: {
        'User-Agent': 'Node.js'
      },
      timeout: 60000
    }, function (err, response, body) {
      if (!err && body && /API Rate Limit Exceeded/.test(body.message)) {
        apiLimitExceeded = true;
        deferred.resolve();
      } else if (body && /Repository access blocked/.test(body.message)) {
        deferred.resolve();
      } else if (!err && response.statusCode === 200) {
        var complete = function (keywords) {
          if (fetchNew === true) {
            cachedResults.push(createComponentData(el.name, body, keywords));
          }
          deferred.resolve(createComponentData(el.name, body, keywords));
        };
        fetchKeywords(user, repo, 'bower.json', function (err, keywords) {
          if (err) {
            fetchKeywords(user, repo, 'package.json', function (err, keywords) {
              complete(keywords);
            });
            return;
          }
          complete(keywords);
        });
      } else if (response && response.statusCode === 404) {
        deferred.resolve();
      } else {
        console.log('err github fetch', el.name, response && response.statusCode, err, body);
        deferred.resolve();
      }
    });

    return deferred.promise;
  }));

  // NOTE: the request callbacks above cannot have run yet at this point, so
  // apiLimitExceeded is still false here and this branch never fires as written.
  if (apiLimitExceeded) {
    console.log('API limit exceeded. Using cached GitHub results.');
    return Q.all(cachedResults);
  }

  if (fetchNew === false) {
    cachedResults = results;
  }

  console.log('Finished fetching data from Bower registry', '' + new Date());
  return Q.all(fetchNew === true ? cachedResults.concat(results) : results);
});
    if (uri == null) {
      return null;
    }
    return await this._fetchResultFromURI(uri);
  }

  store(props: FetchProps, result: CachedResult) {
    if (this._store != null) {
      this._store.store(this.keyOf(props), result);
    }
  }
}

URIBasedGlobalTransformCache.fetchResultFromURI = throat(
  500,
  URIBasedGlobalTransformCache._fetchResultFromURIWithRetry,
);

class OptionsHasher {
  _rootPath: string;
  _cache: WeakMap<TransformWorkerOptions, string>;

  constructor(rootPath: string) {
    this._rootPath = rootPath;
    this._cache = new WeakMap();
  }

  getTransformWorkerOptionsDigest(options: TransformWorkerOptions): string {
    const digest = this._cache.get(options);
    if (digest != null) {
      return digest;
    }
// Requires implied by the calls below; the excerpt omits the top of the
// original import block, so the exact module sources are assumptions.
var fs = require('fs');
var thenify = require('thenify');
var Mkdirp = require('mkdirp');
var Rimraf = require('rimraf');
var lockfile = require('lockfile');

var popsicleProxy = require('popsicle-proxy-agent');
var Throat = require('throat');
var promise_finally_1 = require('promise-finally');
var Touch = require('touch');
var path_1 = require('path');
var url_1 = require('url');
var template = require('string-template');
var config_1 = require('./config');
var path_2 = require('./path');
var references_1 = require('./references');
var rc_1 = require('./rc');
var store_1 = require('./store');
var debug_1 = require('./debug');
var pkg = require('../../package.json');

var registryURL = url_1.parse(rc_1.default.registryURL);
var throat = Throat(Promise);

// Throttle filesystem access to at most 10 concurrent operations per helper.
exports.touch = throat(10, thenify(Touch));
exports.stat = throat(10, thenify(fs.stat));
exports.readFile = throat(10, thenify(fs.readFile));
exports.writeFile = thenify(fs.writeFile);
exports.mkdirp = throat(10, thenify(Mkdirp));
exports.unlink = throat(10, thenify(fs.unlink));
exports.lock = throat(10, thenify(lockfile.lock));
exports.unlock = throat(10, thenify(lockfile.unlock));
exports.rimraf = throat(10, thenify(Rimraf));

function isFile(path) {
  return exports.stat(path).then(function (stat) {
    return stat.isFile();
  }, function () {
    return false;
  });
}
exports.isFile = isFile;

function readJson(path, allowEmpty) {
  return exports.readFile(path, 'utf8')
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {Path} from 'types/Config';
import type {ChangedFilesPromise, Options, Repos} from 'types/ChangedFiles';

import git from './git';
import hg from './hg';
import throat from 'throat';

// This is an arbitrary number. The main goal is to prevent projects with
// many roots (50+) from spawning too many processes at once.
const mutex = throat(5);

const findGitRoot = dir => mutex(() => git.getRoot(dir));
const findHgRoot = dir => mutex(() => hg.getRoot(dir));

export const getChangedFilesForRoots = async (
  roots: Array<Path>,
  options: Options,
): ChangedFilesPromise => {
  const repos = await findRepos(roots);
  const changedFilesOptions = Object.assign({}, {includePaths: roots}, options);
  const gitPromises = Array.from(repos.git).map(repo =>
    git.findChangedFiles(repo, changedFilesOptions),
  );
_uploadFileList: function uploadFileList(files) {
  this.log('Beginning upload.', {verbose: true});
  return RSVP.all(files.map(throat(5, this._uploadFile.bind(this))))
    .then(this._getReleaseFiles.bind(this));
},
.then(files => {
  let allFunctions = flatmap(files, file => file.funcs.concat(file.subs))
    .filter(fn => blacklistedFunctions.indexOf(fn.name) == -1)
  return files.map(throat(1, file => analyzer(baseDir, file.path, allFunctions)))
})
.then(files => files
  .filter(file => blacklistedFiles.indexOf(file.toLocaleLowerCase()) == -1)
  .map(throat(1, file => analyzer(baseDir, file)))
import CordovaPromiseFS from 'cordova-promise-fs';
import CordovaFileCache from 'cordova-file-cache';
import throater from 'throat';

import DomNode from './DomNode';

const throat = throater(Promise);

export default function cordovaLoad(serverRoot, forceReload, manifest, onProgress) {
  const files = Object.keys(manifest.files)
    .map(fileName => manifest.files[fileName])
    .filter(file => file && file.type !== 'css')
    .map(({ path }) => path);

  const cache = new CordovaFileCache({
    fs: new CordovaPromiseFS({ Promise }),
    mode: 'mirror',
    serverRoot,
  });

  window.cordovaFileCache = {
    get: (path) => {
      return cache.get(path).replace(/^file:\/\//, '');
    },
  };

  return cache.ready.then(() => {
    if (forceReload) {
      return cache.clear();
    }
    return Promise.resolve();
  }).then(() => {
(function(global) {
  "use strict";

  var Configfile = require('config')
    , fs = require('fs')
    , globule = require('globule')
    , JSONStream = require('JSONStream')
    , NCMB = require('ncmb')
    , program = require('commander')
    , Converter = require('./lib/converter')
    , throat = require('throat')
    ;

  // handling command-line
  program.version('0.0.1')
    .usage('[options] <directory>')
    .option('-c, --concurrency <number>', 'Set parallel concurrency', parseInt)
    .parse(process.argv);

  if (!program.args.length) {
    program.help();
    return;
  }

  var concurrency = 3; // default
  if (program.concurrency != undefined) {
    concurrency = program.concurrency;
  }

  let targetDir = program.args[0];
  // trim trailing '/'
  if (targetDir[targetDir.length - 1] == '/') {
    targetDir = targetDir.slice(0, -1);
  }
  let files = globule.find(targetDir + '/*');

  // initialize NCMB
  let app_key = Configfile.config.app_key;
  let client_key = Configfile.config.client_key;
  var ncmb = new NCMB(app_key, client_key);

  var Parallel = throat(Promise)(concurrency);

  var typemap = {
    '_Installation.json': 'installation'
    , '_Product.json': 'product'
    , '_Role.json': 'role'
    , '_User.json': 'user'
  };

  files.forEach(function(path) {
    let file = path.replace(/.*\//, '');
    let type, name;
    let joinPrefix = '_Join:';
    if (file.substr(0, joinPrefix.length) === joinPrefix) {
      type = 'join';
      name = file;
    } else if (typemap[file] !== undefined) {
      type = typemap[file];
      name = '';
    } else {
      type = 'object';
      let match = file.match(/(.*)\.json$/);
      if (match === null) {
        return;
      }
      name = match[1];
    }

    var converter = new Converter(ncmb, type, name);
    fs.createReadStream(path)
      .pipe(JSONStream.parse('results.*'))
      .on('data', function(data) {
        Parallel(function() { return converter.convert(data); })
          .then(function(results) {
            // something on success?
          })
          .catch(function(err) {
            console.log(err);
          });
      });
  });

  return;
})((this || 0).self || global);
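// Pattern note: `Parallel` above is throat's limiter form, `throat(Promise)(n)`.
// Each stream 'data' event queues a thunk; at most `concurrency` conversions run
// at once, though throat's pending queue itself is unbounded. A runnable sketch
// of the same shape (the `convert` stand-in is hypothetical):

const throat = require('throat');
const limit = throat(Promise)(3);

const convert = record =>
  new Promise(resolve => setTimeout(() => resolve(record), 50));

['a', 'b', 'c', 'd', 'e', 'f'].forEach(record => {
  // at most 3 convert() calls are in flight at any moment
  limit(() => convert(record)).then(r => console.log('converted', r));
});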
_createParallelTestRun(
  testPaths: Array<Path>,
  watcher: TestWatcher,
  onResult: OnTestResult,
  onFailure: OnRunFailure,
) {
  const config = this._config;
  const farm = workerFarm({
    autoStart: true,
    maxConcurrentCallsPerWorker: 1,
    maxConcurrentWorkers: this._options.maxWorkers,
    maxRetries: 2, // Allow for a couple of transient errors.
  }, TEST_WORKER_PATH);
  const mutex = throat(this._options.maxWorkers);
  const worker = promisify(farm);

  // Send test suites to workers continuously instead of all at once to track
  // the start time of individual tests.
  const runTestInWorker = ({config, path}) => mutex(() => {
    if (watcher.isInterrupted()) {
      return Promise.reject();
    }
    this._dispatcher.onTestStart(config, path);
    return worker({
      config,
      path,
      rawModuleMap: watcher.isWatchMode()
        ? this._hasteContext.moduleMap.getRawModuleMap()
        : null,
    });
  });

  const onError = (err, path) => {
    onFailure(path, err);
    if (err.type === 'ProcessTerminatedError') {
      console.error(
        'A worker process has quit unexpectedly! ' +
        'Most likely this is an initialization error.',
      );
      process.exit(1);
    }
  };

  const onInterrupt = new Promise((_, reject) => {
    watcher.on('change', state => {
      if (state.interrupted) {
        reject(new CancelRun());
      }
    });
  });

  const runAllTests = Promise.all(testPaths.map(path => {
    return runTestInWorker({config, path})
      .then(testResult => onResult(path, testResult))
      .catch(error => onError(error, path));
  }));

  const cleanup = () => workerFarm.end(farm);
  return Promise.race([runAllTests, onInterrupt]).then(cleanup, cleanup);
}
(function(global) {
  "use strict";

  var Configfile = require('config')
    , fs = require('fs')
    , globule = require('globule')
    , JSONStream = require('JSONStream')
    , NCMB = require('ncmb')
    , program = require('commander')
    , Converter = require('./lib/converter')
    , ObjMapper = require('./lib/objmapper')
    , PointerQueue = require('./lib/pointerqueue')
    , PointerSaver = require('./lib/pointersaver')
    , throat = require('throat')
    ;

  // handling command-line
  program.version('0.0.1')
    .usage('[options] <directory>')
    .option('-c, --concurrency <number>', 'Set parallel concurrency', parseInt)
    .option('-p, --phase <number>', 'Specify phase number', parseInt)
    .option('-s, --silent', 'Silent mode')
    .parse(process.argv);

  if (!program.args.length) {
    program.help();
    return;
  }

  var concurrency = 3; // default
  if (program.concurrency != undefined) {
    concurrency = program.concurrency;
  }

  if (program.phase === undefined) {
    console.error('Phase number required');
    process.exit(1);
  }
  if (program.phase != 1 && program.phase != 2) {
    console.error('Invalid phase number');
    process.exit(1);
  }

  let targetDir = program.args[0];
  // trim trailing '/'
  if (targetDir[targetDir.length - 1] == '/') {
    targetDir = targetDir.slice(0, -1);
  }
  let files = globule.find(targetDir + '/*');

  let getPathInfo = function(path) {
    let typemap = {
      '_Installation.json': 'installation'
      , '_Product.json': 'product'
      , '_Role.json': 'role'
      , '_User.json': 'user'
    };
    let joinPrefix = '_Join:';
    let type = null;
    let name = null;
    let file = path.replace(/.*\//, '');

    if (file.substr(0, joinPrefix.length) === joinPrefix) {
      // join type
      type = 'join';
      let match = file.match(/^_Join:(.*)\.json$/);
      name = match[1];
    } else if (typemap[file] !== undefined) {
      // preset types
      type = typemap[file];
      let match = file.match(/(.*)\.json$/);
      name = match[1];
    } else {
      // other
      type = 'object';
      let match = file.match(/(.*)\.json$/);
      if (match === null) {
        type = null;
      } else {
        name = match[1];
      }
    }

    return {
      path: path
      , file: file
      , type: type
      , name: name
    };
  };

  var objFiles = [], relFiles = [];
  files.forEach(function(path) {
    let info = getPathInfo(path);
    if (info.type === null) {
      return;
    }
    if (info.type == 'join') {
      relFiles.push(info);
    } else {
      objFiles.push(info);
    }
  });

  // initialize NCMB
  let app_key = Configfile.config.app_key;
  let client_key = Configfile.config.client_key;
  var ncmb = new NCMB(app_key, client_key);

  var objMapper = new ObjMapper();
  var pointerQueue = new PointerQueue();
  var pointerSaver = new PointerSaver(ncmb, objMapper);

  var Parallel = throat(Promise)(concurrency);

  let targetFiles = null;
  if (program.phase == 1) {
    // Phase 1: store objects
    targetFiles = objFiles;
    objMapper.reset();
    pointerQueue.reset();
  } else {
    // Phase 2: store relations and others
    targetFiles = relFiles;
    objMapper.ensureIndex();
    retrievePointer(pointerQueue, pointerSaver);
  }

  targetFiles.forEach(function(info) {
    var converter = new Converter(ncmb, info.type, info.name, objMapper, pointerQueue);
    fs.createReadStream(info.path)
      .pipe(JSONStream.parse('results.*'))
      .on('data', function(data) {
        Parallel(function() { return converter.convert(data); })
          .catch(function(err) {
            console.error(err);
          });
      })
      .on('end', function() {
        console.log(info.name + ' done.');
      });
  });

  function retrievePointer(pointerQueue, pointerSaver) {
    pointerQueue
      .all()
      .then(function(values) {
        values.forEach(function(val) {
          pointerSaver.retrieve(val);
        });
      })
      .catch(function(err) {
        console.error(err);
      });
  }
})((this || 0).self || global);
async _createParallelTestRun(
  tests: Array<Test>,
  watcher: TestWatcher,
  onStart: OnTestStart,
  onResult: OnTestSuccess,
  onFailure: OnTestFailure,
) {
  // $FlowFixMe: class object is augmented with worker when instantiating.
  const worker: WorkerInterface = new Worker(TEST_WORKER_PATH, {
    exposedMethods: ['worker'],
    maxRetries: 3,
    numWorkers: this._globalConfig.maxWorkers,
  });

  const mutex = throat(this._globalConfig.maxWorkers);

  // Send test suites to workers continuously instead of all at once to track
  // the start time of individual tests.
  const runTestInWorker = test =>
    mutex(async () => {
      if (watcher.isInterrupted()) {
        return Promise.reject();
      }
      await onStart(test);
      return worker.worker({
        config: test.context.config,
        globalConfig: this._globalConfig,
        path: test.path,
        rawModuleMap: watcher.isWatchMode()
          ? test.context.moduleMap.getRawModuleMap()
          : null,
      });
    });

  const onError = async (err, test) => {
    await onFailure(test, err);
    if (err.type === 'ProcessTerminatedError') {
      console.error(
        'A worker process has quit unexpectedly! ' +
        'Most likely this is an initialization error.',
      );
      process.exit(1);
    }
  };

  const onInterrupt = new Promise((_, reject) => {
    watcher.on('change', state => {
      if (state.interrupted) {
        reject(new CancelRun());
      }
    });
  });

  const runAllTests = Promise.all(
    tests.map(test =>
      runTestInWorker(test)
        .then(testResult => onResult(test, testResult))
        .catch(error => onError(error, test)),
    ),
  );

  const cleanup = () => worker.end();

  return Promise.race([runAllTests, onInterrupt]).then(cleanup, cleanup);
}
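// Taken together, the snippets above cover all three ways throat is invoked:
//
//   const throat = require('throat');
//   const limit = throat(5);           // limiter: limit(fn, ...args)
//   const worker = throat(5, doWork);  // pre-wrapped worker: worker(...args)
//   const bound = throat(Promise);     // bind to a Promise implementation first,
//                                      // then bound(5) or bound(5, doWork) as above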