it('handles correct inputs and types return value', () => {
  (pMap(
    [1, 2, Promise.resolve(3)],
    (el: number, index: number) => (el + index).toString(),
    { concurrency: 2 },
  ): Promise<Array<string>>);
  (pMap(['a', 'b', Promise.resolve(3)], () => null): Promise<Array<null>>);
});
// Fetch log graphs in parallel
static fetchParallel (ipfs, hashes, length, exclude = [], concurrency, timeout, onProgressCallback) {
  const fetchOne = (hash) => EntryIO.fetchAll(ipfs, hash, length, exclude, timeout, onProgressCallback)
  const concatArrays = (arr1, arr2) => arr1.concat(arr2)
  const flatten = (arr) => arr.reduce(concatArrays, [])
  return pMap(hashes, fetchOne, { concurrency: Math.max(concurrency || hashes.length, 1) })
    .then(flatten) // Flatten the results
}
run(context) {
  this.render();

  context = context || Object.create(null);

  const errors = [];

  this._checkAll(context);

  const tasks = pMap(this._tasks, task => {
    this._checkAll(context);
    return runTask(task, context, errors);
  }, {concurrency: this.concurrency});

  return tasks
    .then(() => {
      if (errors.length > 0) {
        const err = new ListrError('Something went wrong');
        err.errors = errors;
        throw err;
      }

      this._renderer.end();

      return context;
    })
    .catch(error => {
      error.context = context;
      this._renderer.end(error);
      throw error;
    });
}
function queueRunner(options: Options) {
  const mapper = ({fn, timeout}) => {
    const promise = new Promise(resolve => {
      const next = once(resolve);
      next.fail = function() {
        options.fail.apply(null, arguments);
        resolve();
      };
      try {
        fn.call(options.userContext, next);
      } catch (e) {
        options.onException(e);
        resolve();
      }
    });
    if (!timeout) {
      return promise;
    }
    return pTimeout(
      promise,
      timeout(),
      options.clearTimeout,
      options.setTimeout,
      () => {
        const error = new Error(
          'Timeout - Async callback was not invoked within timeout specified ' +
            'by jasmine.DEFAULT_TIMEOUT_INTERVAL.',
        );
        options.onException(error);
      },
    );
  };
  return pMap(options.queueableFns, mapper, {concurrency: 1});
}
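// Note on the snippet above: `{concurrency: 1}` makes pMap process the queue
// strictly in order, one promise at a time, while still collecting results in
// input order. A minimal sketch of the same sequencing pattern (the `step`
// helper is hypothetical, shown only for illustration):
const pMap = require('p-map');

const step = label => () =>
  new Promise(resolve => setTimeout(() => resolve(label), 10));

pMap([step('a'), step('b'), step('c')], fn => fn(), {concurrency: 1})
  .then(results => console.log(results)); // ['a', 'b', 'c'], run one at a time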
module.exports = postcss.plugin('postcss-inline-base64', (opts = {}) => (css, result) => {
  const {to = '.'} = result.opts
  const options = {...{baseDir: dirname(to)}, ...opts}
  info(options)
  const inlines = []
  css.walkDecls(/^background(-image)?$|^src$/, decl => {
    const matches = decl.value.match(b64Regx) || []
    for (const match of matches) {
      const file = match.replace(b64Regx, '$1')
      inlines.push({file, match, decl})
    }
  })
  return pMap(inlines, async ({file, match, decl}) => {
    const node = decl.parent
    let data = file
    try {
      data = await _inline(options.baseDir, file)
    } catch (err) {
      node.warn(result, err.message)
      error(err.message)
    } finally {
      decl.value = decl.value.replace(match, data)
    }
  })
})
function removeTempLicenses(packagesToBeLicensed) {
  if (!packagesToBeLicensed.length) {
    return Promise.resolve();
  }

  return pMap(packagesToBeLicensed, pkg => fs.remove(pkg.licensePath));
}
async putFiles(files: Array<{ local: string, remote: string }>, givenConfig: Object = {}): Promise<void> {
  invariant(this.connection, 'Not connected to server')
  invariant(Array.isArray(files), 'files must be an array')
  for (let i = 0, { length } = files; i < length; ++i) {
    const file = files[i]
    invariant(file, 'files items must be valid objects')
    invariant(file.local && typeof file.local === 'string', `files[${i}].local must be a string`)
    invariant(file.remote && typeof file.remote === 'string', `files[${i}].remote must be a string`)
  }

  const transferred = []
  const config = Helpers.normalizePutFilesOptions(givenConfig)
  const sftp = config.sftp || (await this.requestSFTP())

  try {
    await pMap(files, async file => {
      await this.putFile(file.local, file.remote, sftp, config.sftpOptions)
      transferred.push(file)
    })
  } catch (error) {
    error.transferred = transferred
    throw error
  } finally {
    // Close the SFTP channel only if we opened it ourselves
    if (!config.sftp) {
      sftp.end()
    }
  }
}
runScriptInPackagesLexical() {
  const runner = this.options.stream
    ? pkg => this.runScriptInPackageStreaming(pkg)
    : pkg => this.runScriptInPackageCapturing(pkg);

  return pMap(this.packagesWithScript, runner, { concurrency: this.concurrency });
}
const runAllTasks = () => {
  if (sequential) {
    return pEachSeries(allTasks, e => pEachSeries(e.tasks, writeToDB))
      .then(() => console.log())
  } else {
    return pMap(allTasks, e => pEachSeries(e.tasks, writeToDB))
      .then(() => console.log())
  }
}
async invokeIssueReporters(issue: any): Promise<void> {
  const issueReporters = this.getComponents('issue-reporter')
  if (!issueReporters.length) {
    console.error('No Issue Reporters found to report this issue:', issue)
    return
  }
  await pMap(issueReporters, issueReporter =>
    issueReporter.callback({
      issue,
      context: this,
    }),
  )
}
module.exports = function (config) {
  var compareConfig = require(config.tempCompareConfigFileName).compareConfig;
  var report = new Reporter(config.ciReport.testSuiteName);

  return map(compareConfig.testPairs, function (pair) {
    var Test = report.addTest(pair);
    var referencePath = path.join(config.projectPath, pair.reference);
    var testPath = path.join(config.projectPath, pair.test);

    return compareImage(referencePath, testPath, config.resembleOutputOptions)
      .then(function logCompareResult (data) {
        pair.diff = data;

        if (
          (pair.requireSameDimensions === false || data.isSameDimensions === true) &&
          data.misMatchPercentage <= pair.misMatchThreshold
        ) {
          Test.status = 'pass';
          logger.success('OK: ' + pair.label + ' ' + pair.fileName);
          data = null;
          pair.diff.getDiffImage = null;
          return pair;
        }

        Test.status = 'fail';

        if (data instanceof Error) {
          logger.error('ERROR ' + data.message + ': ' + pair.label + ' ' + pair.fileName);
          pair.error = data;
          return pair;
        } else {
          logger.error(
            'ERROR { requireSameDimensions: ' + (data.requireSameDimensions ? 'true' : 'false') +
            ' size: ' + (data.isSameDimensions ? 'ok' : 'isDifferent') +
            ', content: ' + data.misMatchPercentage + '%, threshold: ' + pair.misMatchThreshold + '% }: ' +
            pair.label + ' ' + pair.fileName
          );
        }

        return storeFailedDiffImage(testPath, data).then(function (compare) {
          pair.diffImage = compare;
          data = null;
          pair.diff.getDiffImage = null;
          return pair;
        });
      });
  }, { concurrency: config.asyncCompareLimit || ASYNC_COMPARE_LIMIT }).then(function () {
    return report;
  }, function (e) {
    logger.error('The comparison failed with error: ' + e);
  });
};
async transformChunk({
  job,
  chunk,
  locks,
  tickCallback,
  changedImports = new Set(),
}: {
  job: Job,
  chunk: Chunk,
  locks: Set<string>,
  tickCallback?: ?TickCallback,
  changedImports?: Set<string>,
}): Promise<void> {
  const lockKey = getChunkKey(chunk)
  if (locks.has(lockKey)) {
    return
  }
  if (job.chunks.has(lockKey) && !changedImports.size) {
    return
  }
  locks.add(lockKey)

  try {
    job.chunks.set(lockKey, chunk)
    const filesToProcess = chunk.imports.slice()
    if (chunk.filePath) {
      filesToProcess.push({
        meta: chunk.meta,
        format: chunk.format,
        filePath: chunk.filePath,
      })
    }
    await pMap(filesToProcess, fileImport =>
      this.transformFileTree({
        job,
        locks,
        request: fileImport,
        tickCallback,
        changedImports,
      }),
    )
  } catch (error) {
    job.chunks.delete(lockKey)
    throw error
  }
}
value: function fetchParallel(ipfs, hashes, length) {
  var exclude = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : [];
  var concurrency = arguments[4];
  var timeout = arguments[5];
  var onProgressCallback = arguments[6];

  var fetchOne = function fetchOne(hash) {
    return EntryIO.fetchAll(ipfs, hash, length, exclude, timeout, onProgressCallback);
  };
  var concatArrays = function concatArrays(arr1, arr2) {
    return arr1.concat(arr2);
  };
  var flatten = function flatten(arr) {
    return arr.reduce(concatArrays, []);
  };
  return pMap(hashes, fetchOne, {
    concurrency: Math.max(concurrency || hashes.length, 1)
  }).then(flatten); // Flatten the results
}
async generate(job: Job, chunks: Array<Chunk> = Array.from(job.chunks.values())): Promise<ChunksGenerated> {
  const { directory, outputs } = await this.context.invokeChunkGenerators(this, { job, chunks })

  const transformedOutputs = await pMap(outputs, async output => {
    const generated = await this.transformChunkGenerated(output)
    const sourceMap =
      output.sourceMap || generated.sourceMap
        ? {
            ...output.sourceMap,
            ...generated.sourceMap,
          }
        : null
    return {
      ...output,
      ...generated,
      sourceMap,
    }
  })

  return { directory, outputs: transformedOutputs }
}
module.exports.batch = async (addrs, lists, opts = {}) => {
  opts = Object.assign({}, defaults, opts);

  const items = [];
  (Array.isArray(addrs) ? addrs : [addrs]).forEach(address => {
    (Array.isArray(lists) ? lists : [lists]).forEach(blacklist => {
      const resolver = new Resolver();
      resolver.setServers(Array.isArray(opts.servers) ? opts.servers : [opts.servers]);
      items.push({blacklist, address, resolver});
    });
  });

  const results = await pMap(items, item => {
    return query(item.address, item.blacklist, item.resolver, opts);
  }, {concurrency: opts.concurrency});

  return items.map((item, i) => {
    item.listed = results[i];
    delete item.resolver;
    return item;
  });
};
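// The zip-by-index at the end of the snippet above (items.map((item, i) =>
// ... results[i])) is safe because pMap resolves to results in the same order
// as its input, regardless of which mapper calls finish first. A minimal
// sketch of that guarantee (the delays and values are made up):
const pMap = require('p-map');

const inputs = [{ms: 30, v: 'slow'}, {ms: 1, v: 'fast'}];
pMap(inputs, ({ms, v}) => new Promise(r => setTimeout(() => r(v), ms)))
  .then(results => console.log(results)); // ['slow', 'fast'] — input order kept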
async execute(): Promise<ChunksGenerated> {
  const job = new Job()
  const configChunks = (await Promise.all(
    this.context.config.entry.map(entry =>
      this.resolveStrict({
        meta: null,
        request: entry,
        requestFile: null,
        ignoredResolvers: [],
      }),
    ),
  )).map(resolved => getChunk(resolved.format, null, resolved.filePath, [], true, false, resolved.meta))
  await pMap(configChunks, chunk =>
    this.transformChunk({
      job,
      chunk,
      locks: new Set(),
    }),
  )
  return this.generate(await this.transformJob(job))
}
function slowSearchHandler(req, res, next) {
  const matchingUsers = [];
  userList.forEach((user) => {
    if (req.filter.matches(user.attributes)) {
      matchingUsers.push(user);
    }
  });

  // send with slow times, then end the response and continue the chain
  promiseMap(
    matchingUsers,
    user => new Promise((resolve) => {
      setTimeout(() => {
        res.send(user);
        return resolve();
      }, 1000);
    }),
  ).then(() => {
    res.end();
    next();
  });
}
return fs.readdir(src, (err, files) => {
  if (err) {
    console.log(err.stack);
    return reject(err);
  }

  console.log('The following files will be promoted to reference...');

  return map(files, (file) => {
    if (FAILED_DIFF_RE.test(file)) {
      file = file.replace(FAILED_DIFF_RE, '');

      let imageFilter = FILTER_DEFAULT;
      if (config.args && config.args.filter) {
        imageFilter = new RegExp(config.args.filter);
      }

      if (imageFilter.test(file)) {
        console.log('> ', file);
        return fs.copy(path.join(src, file), path.join(config.bitmaps_reference, file));
      }
    }
    return true;
  }).then(resolve).catch(reject);
});
const verifyEntries = async (entries) => {
  const isTrue = e => e === true
  const getPubKey = e => e.getPublic ? e.getPublic('hex') : e
  const checkAllKeys = (keys, entry) => {
    const keyMatches = e => e === entry.key
    return keys.find(keyMatches)
  }
  const pubkeys = this._keys.map(getPubKey)

  const verify = async (entry) => {
    if (!entry.key) throw new Error("Entry doesn't have a public key")
    if (!entry.sig) throw new Error("Entry doesn't have a signature")

    if (this._keys.length === 1 && this._keys[0] === this._key) {
      if (entry.id !== this.id) throw new Error("Entry doesn't belong in this log (wrong ID)")
    }

    if (this._keys.length > 0 &&
        !this._keys.includes('*') &&
        !checkAllKeys(this._keys.concat([this._key]), entry)) {
      console.warn("Warning: Input log contains entries that are not allowed in this log. Logs weren't joined.")
      return false
    }

    try {
      await Entry.verifyEntry(entry, this._keystore)
    } catch (e) {
      throw new Error(`Invalid signature in entry '${entry.hash}'`)
    }

    return true
  }

  const checked = await pMap(entries, verify)
  return checked.every(isTrue)
}
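// verifyEntries above mixes two failure modes: a thrown error rejects the
// whole pMap immediately, while returning false only marks one entry and lets
// .every() decide afterwards. A minimal sketch of the difference (the
// `validate` mapper is hypothetical):
const pMap = require('p-map');

const validate = entry => {
  if (entry.fatal) throw new Error('bad entry');  // rejects the whole batch
  return entry.ok === true;                       // soft failure: just false
};

pMap([{ok: true}, {ok: false}], validate)
  .then(checked => console.log(checked.every(Boolean))) // false
  .catch(err => console.error(err.message));            // only on a throw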
/**
 * Join two logs
 *
 * @description Joins the entries of the given log into this log. The given
 * log is not modified; this log is mutated and returned.
 *
 * @param {Log} log Log to join with this Log
 * @param {Number} [size=-1] Max size of the joined log
 *
 * @example
 * await log1.join(log2)
 *
 * @returns {Promise<Log>} This log
 */
async join (log, size = -1) {
  if (!isDefined(log)) throw LogError.LogNotDefinedError()
  if (!Log.isLog(log)) throw LogError.NotALogError()
  if (this.id !== log.id) return

  // Get the difference of the logs
  const newItems = Log.difference(log, this)

  const identityProvider = this._identity.provider
  // Verify if entries are allowed to be added to the log and throws if
  // there's an invalid entry
  const permitted = async (entry) => {
    const canAppend = await this._access.canAppend(entry, identityProvider)
    if (!canAppend) {
      throw new Error(`Could not append entry, key "${entry.identity.id}" is not allowed to write to the log`)
    }
  }

  // Verify signature for each entry and throws if there's an invalid signature
  const verify = async (entry) => {
    const isValid = await Entry.verify(identityProvider, entry)
    const publicKey = entry.identity ? entry.identity.publicKey : entry.key
    if (!isValid) throw new Error(`Could not validate signature "${entry.sig}" for entry "${entry.hash}" and key "${publicKey}"`)
  }

  const entriesToJoin = Object.values(newItems)
  await pMap(entriesToJoin, permitted, { concurrency: 1 })
  await pMap(entriesToJoin, verify, { concurrency: 1 })

  // Update the internal next pointers index
  const addToNextsIndex = e => {
    const entry = this.get(e.hash)
    if (!entry) this._length++
    /* istanbul ignore else */
    e.next.forEach(a => (this._nextsIndex[a] = e.hash))
  }
  Object.values(newItems).forEach(addToNextsIndex)

  // Update the internal entry index
  this._entryIndex = Object.assign(this._entryIndex, newItems)

  // Merge the heads
  const notReferencedByNewItems = e => !nextsFromNewItems.find(a => a === e.hash)
  const notInCurrentNexts = e => !this._nextsIndex[e.hash]
  const nextsFromNewItems = Object.values(newItems).map(getNextPointers).reduce(flatMap, [])
  const mergedHeads = Log.findHeads(Object.values(Object.assign({}, this._headsIndex, log._headsIndex)))
    .filter(notReferencedByNewItems)
    .filter(notInCurrentNexts)
    .reduce(uniqueEntriesReducer, {})
  this._headsIndex = mergedHeads

  // Slice to the requested size
  if (size > -1) {
    let tmp = this.values
    tmp = tmp.slice(-size)
    this._entryIndex = tmp.reduce(uniqueEntriesReducer, {})
    this._headsIndex = Log.findHeads(tmp)
    this._length = Object.values(this._entryIndex).length
  }

  // Find the latest clock from the heads
  const maxClock = Object.values(this._headsIndex).reduce(maxClockTimeReducer, 0)
  this._clock = new Clock(this.clock.id, Math.max(this.clock.time, maxClock))

  return this
}
const authorName = argv.n || argv['author-name']
const authorUrl = argv.U || argv['author-url']

if (typeof per !== 'number' || typeof unit !== 'string') {
  throw new TypeError('`per` must be `number`, `unit` must be `string`')
}

const github = new GitHub()
const getEvents = page =>
  github.activity.getEventsForUser({
    per_page: 100,
    username,
    page,
  })

pMap([1, 2, 3], page => getEvents(page).then(res => res.data))
  .then(results => {
    const target = moment().subtract(per, unit)
    const targetMonth = `${target.month() + 1}`
    const nextMonth = `${moment().month() + 1}`
    const targetPaddedMonth = padStart(targetMonth, 2, '0')
    const nextPaddedMonth = padStart(nextMonth, 2, '0')
    const targetDate = `${target.year()}${targetPaddedMonth}01`
    const nextDate = `${moment().year()}${nextPaddedMonth}01`
    const targetM = moment(targetDate)
    const nextM = moment(nextDate)
    const laterThisMonthData = flatten(results).filter(d => {
      const val = moment(d.created_at).valueOf()
      return nextM.valueOf() > val && val > targetM.valueOf()
    })
  })
it('errors when wrong return type', () => {
  // $ExpectError
  (pMap([1, 2, Promise.resolve(3)], (el: number) => el): Promise<
    Array<string>,
  >);
});
return fs.mkdirp(destPackage.binLocation).then(() =>
  pMap(actions, meta => {
    if (meta) {
      return createSymlink(meta.src, meta.dst, "exec").then(() => fs.chmod(meta.src, "755"));
    }
  })
);
runScriptInPackagesParallel() {
  return pMap(this.packagesWithScript, pkg => this.runScriptInPackageStreaming(pkg));
}
async putDirectory(localDirectory: string, remoteDirectory: string, givenConfig: Object = {}): Promise<boolean> {
  invariant(this.connection, 'Not connected to server')
  invariant(typeof localDirectory === 'string' && localDirectory, 'localDirectory must be a string')
  invariant(typeof remoteDirectory === 'string' && remoteDirectory, 'remoteDirectory must be a string')
  invariant(await Helpers.exists(localDirectory), `localDirectory does not exist at ${localDirectory}`)
  invariant((await Helpers.stat(localDirectory)).isDirectory(), `localDirectory is not a directory at ${localDirectory}`)
  invariant(typeof givenConfig === 'object' && givenConfig, 'config must be an object')

  const config = Helpers.normalizePutDirectoryOptions(givenConfig)
  const sftp = config.sftp || (await this.requestSFTP())

  const scanned = await scanDirectory(localDirectory, config.recursive, config.validate)
  const files = scanned.files.map(i => Path.relative(localDirectory, i))
  const directories = scanned.directories.map(i => Path.relative(localDirectory, i))

  let failed = false
  let directoriesQueue = Promise.resolve()
  const directoriesCreated = new Set()

  const createDirectory = async path => {
    if (!directoriesCreated.has(path)) {
      directoriesCreated.add(path)
      directoriesQueue = directoriesQueue.then(() => this.mkdir(path, 'sftp', sftp))
      await directoriesQueue
    }
  }

  try {
    await pMap(
      files,
      async file => {
        const localFile = Path.join(localDirectory, file)
        const remoteFile = Path.join(remoteDirectory, file)
          .split(Path.sep)
          .join('/')
        const remoteFileDirectory = Path.dirname(remoteFile)
        await createDirectory(remoteFileDirectory)
        try {
          await this.putFile(localFile, remoteFile, sftp, config.sftpOptions)
          config.tick(localFile, remoteFile, null)
        } catch (_) {
          failed = true
          config.tick(localFile, remoteFile, _)
        }
      },
      { concurrency: config.concurrency },
    )
    await pMap(
      directories,
      async function(entry) {
        const remoteEntry = Path.join(remoteDirectory, entry)
          .split(Path.sep)
          .join('/')
        await createDirectory(remoteEntry)
      },
      { concurrency: config.concurrency },
    )
  } finally {
    if (!config.sftp) {
      sftp.end()
    }
  }
  return !failed
}
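// The createDirectory helper above serializes all mkdir calls on a single
// promise chain (directoriesQueue) so that concurrent pMap workers never race
// to create the same remote directory. A minimal sketch of that pattern, with
// a hypothetical makeDir standing in for the real SFTP call:
const pMap = require('p-map');

const makeDir = path => Promise.resolve(console.log('mkdir', path)); // hypothetical

let queue = Promise.resolve();
const created = new Set();
const ensureDir = path => {
  if (!created.has(path)) {
    created.add(path);
    queue = queue.then(() => makeDir(path)); // chain, never run in parallel
  }
  return queue;
};

pMap(['a/b/x.txt', 'a/b/y.txt', 'a/z.txt'], async file => {
  await ensureDir(file.slice(0, file.lastIndexOf('/')));
  // ...upload the file here
}, {concurrency: 4});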
async transformFileTree({
  job,
  locks,
  request,
  tickCallback,
  changedImports = new Set(),
}: {
  job: Job,
  locks: Set<string>,
  request: ImportResolved,
  tickCallback?: ?TickCallback,
  changedImports?: Set<string>,
}): Promise<void> {
  const lockKey = getFileKey(request)
  const fileChanged = changedImports.has(lockKey)
  const oldFile = job.files.get(lockKey)

  if (locks.has(lockKey)) {
    return
  }
  locks.add(lockKey)

  let cachedFile = null
  if (!fileChanged) {
    cachedFile = await this.cache.getFile(request)
  }

  let newFile
  if (oldFile && !fileChanged) {
    newFile = oldFile
  } else if (cachedFile && !fileChanged) {
    newFile = cachedFile
    job.files.set(lockKey, newFile)
  } else {
    changedImports.delete(lockKey)
    newFile = await this.transformFile(request)
    job.files.set(lockKey, newFile)
    this.cache.setFile(request, newFile)
  }

  await Promise.all([
    pMap(newFile.imports, fileImport =>
      this.transformFileTree({
        job,
        locks,
        request: fileImport,
        tickCallback,
        changedImports,
      }),
    ),
    pMap(newFile.chunks, fileChunk =>
      this.transformChunk({
        job,
        chunk: fileChunk,
        locks,
        tickCallback,
        changedImports,
      }),
    ),
  ])

  if (oldFile !== newFile && cachedFile !== newFile && tickCallback) {
    await tickCallback(oldFile, newFile)
  }
}
function delegateScenarios (config) {
  // TODO: start chromy here? Or later? maybe later because maybe changing resolutions doesn't work after starting?
  // casper.start();

  var scenarios = [];
  var scenarioViews = [];

  config.viewports.forEach(saveViewportIndexes);

  // casper.each(scenarios, function (casper, scenario, i) {
  config.scenarios.forEach(function (scenario, i) {
    // var scenarioLabelSafe = makeSafe(scenario.label);
    scenario.sIndex = i;
    scenario.selectors = scenario.selectors || [];
    scenario.viewports && scenario.viewports.forEach(saveViewportIndexes);
    scenarios.push(scenario);

    if (!config.isReference && scenario.hasOwnProperty('variants')) {
      scenario.variants.forEach(function (variant) {
        // var variantLabelSafe = makeSafe(variant.label);
        variant._parent = scenario;
        scenarios.push(scenario);
        // processScenario(casper, variant, variantLabelSafe, scenarioLabelSafe, viewports, bitmapsReferencePath, bitmapsTestPath, screenshotDateTime);
      });
    }
  });

  var scenarioViewId = 0;
  scenarios.forEach(function (scenario) {
    var desiredViewportsForScenario = config.viewports;
    if (scenario.viewports && scenario.viewports.length > 0) {
      desiredViewportsForScenario = scenario.viewports;
    }

    desiredViewportsForScenario.forEach(function (viewport) {
      scenarioViews.push({
        scenario: scenario,
        viewport: viewport,
        config: config,
        id: scenarioViewId++
      });
    });
  });

  const asyncCaptureLimit = config.asyncCaptureLimit === 0 ? 1 : config.asyncCaptureLimit || CONCURRENCY_DEFAULT;

  if (/chrom./i.test(config.engine)) {
    const PORT = (config.startingPort || CHROMY_STARTING_PORT_NUMBER);
    var getFreePorts = require('./getFreePorts');
    return getFreePorts(PORT, scenarioViews.length).then(freeports => {
      console.log('These ports will be used:', JSON.stringify(freeports));
      scenarioViews.forEach((scenarioView, i) => {
        scenarioView.assignedPort = freeports[i];
      });
      return pMap(scenarioViews, runChromy, { concurrency: asyncCaptureLimit });
    });
  } else if (config.engine.startsWith('puppet')) {
    return pMap(scenarioViews, runPuppet, { concurrency: asyncCaptureLimit });
  } else {
    logger.error(`Engine "${(typeof config.engine === 'string' && config.engine) || 'undefined'}" not recognized! If you require PhantomJS or Slimer support please use backstopjs@3.8.8 or earlier.`);
  }
}
async invokeChunkGenerators(
  worker: PundleWorker,
  { job, chunks }: { job: Job, chunks: Array<Chunk> },
): Promise<ChunksGenerated> {
  const outputs = []

  const generators = this.getComponents('chunk-generator')
  if (!generators.length) {
    throw new PundleError('WORK', 'GENERATE_FAILED', 'No chunk generators configured')
  }

  await pMap(chunks, async chunk => {
    let generated = null
    for (const generator of generators) {
      generated = await generator.callback({
        job,
        chunk,
        context: this,
        worker,
      })
      if (generated) {
        try {
          await validators.generated(generated)
        } catch (error) {
          if (error && error.name === 'ValidationError') {
            throw new PundleError(
              'WORK',
              'GENERATE_FAILED',
              `Chunk Generator '${generator.name}' returned invalid results: ${error.errors.join(', ')}`,
            )
          }
          throw error
        }
        break
      }
    }
    if (!generated) {
      const ps = []
      if (chunk.filePath) {
        ps.push(`with entry '${chunk.filePath}'`)
      }
      if (chunk.label) {
        ps.push(`with label '${chunk.label}'`)
      }
      throw new PundleError(
        'WORK',
        'GENERATE_FAILED',
        `Chunk Generators refused to generate chunk of format '${chunk.format}'${ps.length ? ` ${ps.join(' ')}` : ''}`,
      )
    }
    outputs.push({
      chunk,
      format: generated.format,
      contents: generated.contents,
      filePath: this.getPublicPath({ ...chunk, format: generated.format }),
      sourceMap: generated.sourceMap || null,
    })
  })

  return {
    directory: this.config.output.rootDirectory,
    outputs,
  }
}
return globby(patterns, options).then(files => pMap(files, mapper, options));
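// The one-liner above reuses a single options object for both globby and
// pMap; that works because pMap only reads the option keys it knows (such as
// concurrency) and ignores the rest. A minimal sketch of the same pattern,
// with a hypothetical stat-based mapper:
const globby = require('globby');
const pMap = require('p-map');
const fs = require('fs');

const options = {concurrency: 8, gitignore: true}; // gitignore is read by globby only

const sizeOf = file => new Promise((resolve, reject) =>
  fs.stat(file, (err, stats) => (err ? reject(err) : resolve(stats.size))));

globby(['src/**/*.js'], options)
  .then(files => pMap(files, sizeOf, options))
  .then(sizes => console.log(sizes));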