Example #1
	async.parallelLimit(globFunctions, batchCount, function(err, results) {
		if (err) {
			return callback(err);
		}
		results = (options.noGlob) ? files : _.flatten(results, true);

		// Sort, de-duplicate, and drop directory entries (paths ending in '/') before hashing.
		files = _.chain(results.sort())
					.unique(true)
					.filter(function(file) {
						return (file[file.length-1] !== '/');
					})
					.value();

		files.forEach(function(file) {
			readFileFunctions.push(handleReadFile(file));
		});

		async.parallelLimit(readFileFunctions, batchCount, function(err, fileDataList) {
			if (err) {
				return callback(err);
			}
			var hash = crypto.createHash(algorithm);
			hash.update(Buffer.concat(fileDataList));
			
			callback(null, hash.digest('hex'));
		});
	});
var uploadPhotosToPhotoset = module.exports.uploadPhotosToPhotoset = function(flickrApi, photoset, dirPath, files, callback) {
  var tasks = [];
  _.each(files, function(file) {
    tasks.push(
      function(parallelCallback) {
        async.waterfall([
          function(next) {
            uploadPhotos(flickrApi, dirPath, [file], next);
          },
          function(photos, next) {
            winston.info("Add photo "+photos[0].id+" to photoset "+photoset.id+".");
            flickrApi.photosets.addPhoto({'photoset_id': photoset.id, 'photo_id': photos[0].id}, function(error, result) {
              if (error) {
                winston.error("Add photo "+photos[0].id+" to photoset "+photoset.id+".", error.toString());
              }
              next(null, result);
            });
          }
        ], parallelCallback);
      }
    );
  });
  var parallelUpload = (conf && conf.photos) ? conf.photos.parallelUploadPhotos : 1;
  async.parallelLimit(tasks, parallelUpload, callback); 
};
var scrapeCards = function() {
	var abbreviations = KeywordsController.getSetAbrreviations();

	for(var aKey in abbreviations) {
		var setId = abbreviations[aKey];
		for(var rKey in rarities) {
			var rarity = rarities[rKey];
			(function(setId, rarity) {
				scrapers.push(function(callback) {
					console.log('attempting to scrape ' + setId + ' rarity ' + rarity);
					var url = gathererUrl + '&rarity=' + rarity + '&set=' + setId;
					var r = request(url).pipe(fs.createWriteStream(__dirname + '/../setImages/' + setId + '_' + rarity + '.jpg'));
					r.on('close', function() {
						callback();
					});
				});
			})(setId, rarity);
		}
	}

	console.log('attempting to start scrapers');
	console.log('total scrapers: ' + scrapers.length);
	async.parallelLimit(scrapers, 50, function(err, results) {
		if(err) {
			console.log('error', err);
		}
		console.log('done!');
	});
}
Example #4
		parallel: function(methods, callback, pLimit) {
			var limit = pLimit && pLimit >= 1 ? pLimit : 5;

			async.parallelLimit(methods, limit, function(err, results) {
				callback(err, results);
			});
		},
    return new Q.Promise(function (resolve, reject) {
      try {
        var allChecks = [];

        for (var i = 0; i < layersList.length; i++) {
          var layerID = layersList[i];
          allChecks.push(this.dockerRegistry.downloadImageGetSize(hubResult, layerID));
        }

        async.parallelLimit(allChecks, 10,
        function(err, results) {
          if (err) {
            return reject(err);
          }

          var totalSize = _.reduce(results, function(sum, num) {
            return sum + num;
          }, 0);

          log.debug('\n\n:: syncronizer - getSizes ::');
          log.debug('layers:', layersList.length);

          return resolve(totalSize);
        });
      } catch (err) {
        log.error(err.stack);
        reject(err);
      }
    }.bind(this));
Example #6
      trade_item_db.getTradeItems(query, batch, 10, function (err, items) {
        if (err) return next(err)

        log.info('migrate_trade_items getTradeItems return item count: ' + items.length)

        if (!items || !items.length) {
          res.jsonp({msg: 'Migrated ' + gtinsMigrated.length + ' items for recipient ' + recipient + ', GTINs: ' + gtinsMigrated.join(', ')})
          return res.end()
        }

        var tasks = []
        items.forEach(function (item) {
          log.debug('migrating (resaving) tradeitem with gtin ' + item.gtin)
          tasks.push(function (callback) {
            trade_item_db.saveTradeItem(item, callback)
          })
        })
        async.parallelLimit(tasks, config.concurrency, function (err, results) {
          if (err) return next(err)
          log.debug('parallel results: ' + JSON.stringify(results))
          results = _.flatten(results)
          gtinsMigrated = gtinsMigrated.concat(results)

          setTimeout(function () {
            migrateItemBatch(batch + 1)
          }, 500)
        }) // end async.parallelLimit
      }) // end trade_item_db.getTradeItems
  Oracle.prototype.discoverModelDefinitions = function(options, cb) {
    if (!cb && typeof options === 'function') {
      cb = options;
      options = {};
    }
    options = options || {};

    var self = this;
    var calls = [function(callback) {
      self.execute(queryTables(options), callback);
    }];

    if (options.views) {
      calls.push(function(callback) {
        self.execute(queryViews(options), callback);
      });
    }
    async.parallelLimit(calls, this.parallelLimit, function(err, data) {
      if (err) {
        cb(err, data);
      } else {
        var merged = [];
        merged = merged.concat(data.shift());
        if (data.length) {
          merged = merged.concat(data.shift());
        }
        cb(err, merged);
      }
    });
  };
	ImportTopics.prototype.saveTopics = function (topics, cb) {
		var self = this;
		var content = {completed: false};

		//create tasks
		var dao = new pb.DAO();
		var tasks = util.getTasks(topics, function (topicArry, index) {
			return function (callback) {

				dao.count('topic', {name: topicArry[index].trim()}, function (err, count) {
					if (count > 0) {
						return callback(null, true);
					}

					var topicDocument = pb.DocumentCreator.create('topic', {name: topicArry[index].trim()});
					dao.save(topicDocument, callback);
				});

			};
		});

		//execute in parallel
		async.parallelLimit(tasks, 3, function (err, results) {
			if (util.isError(err)) {
				return cb({
					code: 500,
					content: pb.BaseController.apiResponse(pb.BaseController.API_ERROR, self.ls.get('ERROR_SAVING'))
				});
			}

			cb({content: pb.BaseController.apiResponse(pb.BaseController.API_SUCCESS, loc.topics.TOPICS_CREATED)});
		});
	};
Example #9
File: Work.js Project: VSWS/xyz
 Work.find(query, projection, function (err, works) {
     if (err) callback(err);
     else {
         var tasks = [];
         works.forEach(function (work) {
             tasks.push(function (callback) {
                 work.getCopyCounts(function (err, totalCopies, mDiskCopiesSold, editionsSold, totalLocked, totalUnclaimed, mDiskCopiesAvailable, editionsAvailable, totalTimesSold) {
                     if (err) callback(err);
                     else {
                         var _work = work.toObject();
                         _work.counts = {
                             totalCopies: totalCopies,
                             mDiskCopiesSold: mDiskCopiesSold,
                             editionsSold: editionsSold,
                             totalLocked: totalLocked,
                             totalUnclaimed: totalUnclaimed,
                             mDiskCopiesAvailable: mDiskCopiesAvailable,
                             editionsAvailable: editionsAvailable,
                             totalSold: mDiskCopiesSold + editionsSold,
                             copiesAvailable: (mDiskCopiesAvailable + editionsAvailable) - (totalLocked + totalUnclaimed),
                             copiesSoldInTotal: totalTimesSold
                         };
                         callback(null, _work);
                     }
                 });
             });
         });
         async.parallelLimit(tasks, 20, callback);
     }
 });
 return ProjectEntityHandler.getAllFiles(project_id, function(error, files) {
   if (error != null) {
     return callback(error)
   }
   const jobs = []
   for (let path in files) {
     const file = files[path]
     ;((path, file) =>
       jobs.push(callback =>
         FileStoreHandler.getFileStream(project_id, file._id, {}, function(
           error,
           stream
         ) {
           if (error != null) {
             logger.err(
               { err: error, project_id, file_id: file._id },
               'something went wrong adding file to zip archive'
             )
              return callback(error)
           }
           if (path[0] === '/') {
             path = path.slice(1)
           }
           archive.append(stream, { name: path })
           return stream.on('end', () => callback())
         })
       ))(path, file)
   }
   return async.parallelLimit(jobs, 5, callback)
 })
Example #11
        this.ts.registerLocal('urls', function(flag, cb) {

            var dao   = new pb.DAO();
            var today = new Date();
            var descriptors = {
                section: {
                    where: {type: {$ne: 'container'}},
                    weight: '0.5',
                    path: '/'
                },
                page: {
                    where: {publish_date: {$lte: today}},
                    weight: '1.0',
                    path: '/page/'
                },
                article: {
                    where: {publish_date: {$lte: today}},
                    weight: '1.0',
                    path: '/article/'
                }
            };
            var tasks = util.getTasks(Object.keys(descriptors), function(keys, i) {
                return function(callback) {
                    var data = descriptors[keys[i]];
                    data.select = {url: 1, last_modified: 1};
                    dao.q(keys[i], data, function(err, items) {
                        self.processObjects(items, data.path, data.weight, callback);
                    });
                };    
            });
            async.parallelLimit(tasks, 2, function(err, htmlParts) {
                cb(err, new pb.TemplateValue(htmlParts.join(''), false));
            });
        });
Example #12
/***
 * Generates an execution plan for the list of files.
 * Iterates through the files, creates a task for each, then hands the tasks to async.parallelLimit (or async.parallel when no limit is given) to execute them.
 * @param files
 * @param zipfile_path
 * @param limit //optional
 * @param callback //optional
 */
function generateExecutionPlan(files, zipfile_path, limit, callback){
    console.log('begin generating zipfile');
    var tasks = [];

    var output = fs.createWriteStream(zipfile_path)
    var zip = archiver('zip');
    zip.on('error', function(err) {
        throw err;
    });
    zip.pipe(output);


    //generate list of tasks from files list.
    for(var ndx in files){
        var file = files[ndx];
        tasks.push(singleTask(file.url, file.path,zip));
    }

    if(limit){
        async.parallelLimit(tasks, limit, completionTask(zip,zipfile_path,callback));
    }
    else{
        async.parallel(tasks, completionTask(zip,zipfile_path,callback));
    }

}
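
The plan above relies on two helpers, singleTask and completionTask, that are not shown in this example. A minimal, hypothetical sketch of what they might look like (the request-based download and the log messages are assumptions, not the original project's code):

var request = require('request');

// Hypothetical: returns an async task that downloads one remote file and appends it to the archive.
function singleTask(url, path, zip) {
    return function(taskCallback) {
        // encoding: null returns the body as a Buffer, which archiver accepts directly.
        request({ url: url, encoding: null }, function(err, res, body) {
            if (err) {
                return taskCallback(err);
            }
            zip.append(body, { name: path });
            taskCallback(null, path);
        });
    };
}

// Hypothetical: returns the final callback handed to async.parallel / async.parallelLimit.
function completionTask(zip, zipfile_path, callback) {
    return function(err, results) {
        if (err) {
            return callback && callback(err);
        }
        // All entries are queued; finalize() flushes them through the pipe created in generateExecutionPlan.
        zip.finalize();
        console.log('finished queueing ' + results.length + ' files into ' + zipfile_path);
        if (callback) {
            callback(null, zipfile_path);
        }
    };
}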
Example #13
 return new Promise(function(resolve){
   var parallelFiles = []
   files.forEach(function(file){
     parallelFiles.push(function(cb){
       return exec("git log --pretty=short --follow " + file + " | git shortlog --summary --numbered --no-merges --email")
       .then(function(stdout){
         if(stdout){
           cache.value.files[file] = {}
           stdout
              .trim()
             .split("\n")
             .forEach(function(line){
               line = line.trim()
               cache.value.files[file][cache.value.mapByEmail[line.match(emailRE)[1]].login] = line.match(commitsRE)[1]
             })
         }
       }, function(stderr){
         console.error(stderr)
         throw stderr
       })
       .done(cb)
     })
   })
   async.parallelLimit(parallelFiles, 20, function(){
     // console.log(cache.value.files)
     gutil.log("Contributions map for files done")
     resolve()
   })
 })
Example #14
function publish() {
  changedPackages.forEach(function (name) {
    // prepublish script
    var prePub = getPackageLocation(name) + "/scripts/prepublish.js";
    if (fs.existsSync(prePub)) require(prePub);
  });

  async.parallelLimit(changedPackages.map(function (name) {
    return function run(done) {
      var loc = getPackageLocation(name);

      child.exec("cd " + loc + " && npm publish --tag prerelease", function (err, stdout, stderr) {
        if (err || stderr) {
          err = stderr || err.stack;
          console.error(err);
          if (err.indexOf("You cannot publish over the previously published version") < 0) {
            return run(done);
          }
        }

        console.log(stdout.trim());

        // postpublish script
        var postPub = loc + "/scripts/postpublish.js";
        if (fs.existsSync(postPub)) require(postPub);

        done();
      });
    };
  }), 4, function (err) {
    onError(err);
    ship();
  });
}
function parallel(tasks, limit, callback) {
    if (limit) {
        return async.parallelLimit(tasks, limit, callback);
    } 

    return async.parallel(tasks, callback);
}
		dao.count(qf, function(err, data) {
			if (err) {
				log.error(err);
				next(err);
			} else {
				if (data) {
					var mangFuncs = [];
					for (var i = 0; i < data.length; i++) {
						mangFuncs.push((function(j) {
							return function(callback) {
								// Defer the call itself; the original invoked mangle immediately and passed its result to setTimeout.
								setTimeout(function() {
									listObjectMangler.mangle(data[j], compiledSchema, function(err, result) {
										callback(err, result);
									});
								}, 0);
							};
						}(i)));
					}

					async.parallelLimit(mangFuncs, 3, function(err) {
						if (err) {
							return next(err);
						}

						callback(null, null, data);
					});
				} else {
						callback(null,null,data);
				}
			}
		});
Example #17
        parser.parseString(xml, function(err, result) {
            if(err) {
                var obj = [true, Error(url + ': error parsing xml')];
                results.push(obj);
                callback && callback.apply(this, obj);
                commonCallback(results);
                return;
            }

            var tasks = [];
            if(result && result.urlset && Array.isArray(result.urlset.url)) {
                result.urlset.url.forEach(function(el) {
                    el.loc && el.loc.forEach(function(url) {
                        tasks.push(function(cb) {
                            checkUrl(url, function(err, data) {
                                callback && callback(err, data);
                                cb(false, [err, data]);
                            }, settings);
                        });
                    });
                });
            }

            async.parallelLimit(tasks, getMaxRequest(settings), function(err, data) {
                commonCallback(data);
            });
        });
Example #18
    ImportTopics.prototype.saveTopics = function(topics, cb) {
        var self = this;
        //create tasks
        var tasks = util.getTasks(topics, function(topicArry, index) {
            return function(callback) {

                self.siteQueryService.count('topic', {name: topicArry[index].trim()}, function(err, count){
                    if (count > 0) {
                        return callback(null, true);
                    }

                    var topicDocument = pb.DocumentCreator.create('topic', {name: topicArry[index].trim()});
                    self.siteQueryService.save(topicDocument, callback);
                });

            };
        });

        //execute in parallel
        async.parallelLimit(tasks, 3, function(err, results){
            if(util.isError(err)) {
                return cb({
                    code: 500,
                    content: pb.BaseController.apiResponse(pb.BaseController.API_ERROR, self.ls.g('generic.ERROR_SAVING'))
                });
            }

            cb({content: pb.BaseController.apiResponse(pb.BaseController.API_SUCCESS, self.ls.g('topics.TOPICS_CREATED'))});
        });
    };
Example #19
Deploid.prototype._deployNodes = function( done ) {

	console.error( 'Deploying nodes.' );

	var self = this;
	var tasks = [];

	var pushTask = function( node ) {
		tasks.push( function( done ) {
			self._deployNode( node, done );
		} );
	};

	this._config.nodes.forEach( pushTask );

	// clusters should run at least 25% idle, so having 10% of nodes offline at a time should be OK.
	var limit = Math.max( 1, Math.ceil( this._config.nodes.length / 10 ) );

	async.parallelLimit( tasks, limit, function( err ) {
		if ( err ) {
			done( err );
			return;
		}

		console.error( 'Deployed nodes.' );
		done();
	} );

};
Example #20
 return function (callback) {
     var users = app._users_;
     if (_.size(users) > 0) {
         async.parallelLimit(_.map(users, function (user) {
             return function (callback) {
                 user.__getAllScores__().then(function (res) {
                     console.log(user._id_ + ' get scores success at ' + new Date());
                     callback(null, res);
                 }, function (err) {
                     console.log(user._id_ + ' get scores failed at ' + new Date() + ', because of ' + err);
                     callback(null);
                 });
             }
         }), 3, function (err, results) {
             err ? console.log('Get scores mission failed at ' + new Date() + ' because of ' + err):
                 console.log('Get scores mission over at ' + new Date());
             callback(err, results);
         });
     }
     else {
         var message = 'Get scores unavailable because of no user at' + new Date();
         console.log(message);
         callback(null);
     }
 };
conn.login(process.env.F3_USERNAME, process.env.F3_PASSWORD, function(err, res) {

	if (err) { return console.error(err); }

	var fns = [];
	for (var i = 0; i < 1000; i++) {

		var fn = (function(idx) {
			return function(cb) {
				var path = "/MRK_CTI_DataLookup?LK_TYPE=ANI&ANI=1" + idx + "&ST";
				conn.apex.get(path, function(err, res) {
					if (err) {
						console.error(err);
						return cb(err);
					}
					console.log((new Date()) + " request 200 OK: " + path);
					
					cb(null);
				});
			}
		})(i);


		fns.push(fn);
	}

	async.parallelLimit(fns, 20);

});
Example #22
function createZip(fn) {
  var zip = new JSZip(null, {
    type: 'nodebuffer'
  });
  var debug = require('debug')('lone:bundle:zip');

  debug('reading files');

  async.parallelLimit(config.manifest.files.map(function(p) {
    return function(cb) {
      fs.readFile(config.src + '/' + p, function(err, data) {
        if (err) {
          return cb(err);
        }

        zip.file(p, data);
        cb(null);
      });
    };
  }), 100, function(err) {
    if (err) {
      return fn(err);
    }
    debug('writing', config.bundle);
    fs.writeFile(config.bundle, zip.generate({
      type: 'nodebuffer'
    }), fn);
  });
}
Example #23
    const bootstrapBatch = () => {

      // Get all packages that have no remaining dependencies within the repo
      // that haven't yet been bootstrapped.
      const batch = todoPackages.filter(pkg => {
        const node = filteredGraph.get(pkg.name);
        return !node.dependencies.filter(dep => !donePackages[dep]).length;
      });

      async.parallelLimit(batch.map(pkg => done => {
        async.series([
          cb => FileSystemUtilities.mkdirp(pkg.nodeModulesLocation, cb),
          cb => this.installExternalPackages(pkg, cb),
          cb => this.linkDependenciesForPackage(pkg, cb),
          cb => this.runPrepublishForPackage(pkg, cb),
        ], err => {
          this.progressBar.tick(pkg.name);
          donePackages[pkg.name] = true;
          todoPackages.splice(todoPackages.indexOf(pkg), 1);
          done(err);
        });
      }), this.concurrency, err => {
        if (todoPackages.length && !err) {
          bootstrapBatch();
        } else {
          this.progressBar.terminate();
          callback(err);
        }
      });
    }
Example #24
Addresses.prototype.summary = function(addresses, callback) { 
  // the docs/source code would suggest that it's possible to get multiple 
  // addresses within one request, but it doesn't work
  // example: https://test-insight.bitpay.com/api/addr/mpNDUWcDcZw1Teo3LFHvr8usNdwDLKdTaY,mv3fK2ME7g9K4HswGXs6mG92e7gRgsTsqM

  var self = this

  var makeRequest = function(addr, callback) {
    request.get(self.url + addr).end(function(res) {
      if (!res.ok) return callback(new Error('non-ok http status code'), res)

      var data = {
        address: addr,
        balance: res.body.balanceSat, //this could be a problem for altcoins that don't fit in a JS number (53 bits)
        totalReceived: res.body.totalReceivedSat,
        txCount: res.body.txApperances //misspelled in the API
      }

      callback(null, data)
    })
  }

  var addrs = Array.isArray(addresses) ? addresses : [ addresses ]
  var fns = addrs.map(function(addr) {
    return function(callback) { makeRequest(addr, callback) }
  })

  async.parallelLimit(fns, self._limit, function(err, results) {
    if (err) return callback(err, results)
    callback(null, Array.isArray(addresses) ? results : results[0])
  })
}
Example #25
 rimrafNodeModulesInPackages(callback) {
   async.parallelLimit(this.packagesToClean.map((pkg) => (cb) => {
     FileSystemUtilities.rimraf(pkg.nodeModulesLocation, (err) => {
       progressBar.tick(pkg.name);
       cb(err);
     });
   }), this.concurrency, callback);
 }
Example #26
 // then run jobs in parallel, with the concurrency limit set to 10
 function(callback)
 {
     // console.log('pageCount is '+pageCount);
     let pages = Array.from(Array(pageCount).keys()).map(e=>e+1);
     async.parallelLimit(getFuncArr(baseUrl,pages), 10, function(err,results){
         callback(null,2);
     });
 }
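
The snippet assumes a getFuncArr helper that turns the page numbers into an array of task functions. A minimal, hypothetical sketch (the request client and query-string scheme are illustrative assumptions, not taken from the original project):

 var request = require('request');

 // Hypothetical: build one task per page so async.parallelLimit can fetch them 10 at a time.
 function getFuncArr(baseUrl, pages) {
     return pages.map(function(page) {
         return function(cb) {
             request(baseUrl + '?page=' + page, function(err, res, body) {
                 // Pass any error straight through to the final parallelLimit callback.
                 cb(err, body);
             });
         };
     });
 }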
Example #27
    }, function (err, results) {
        if (err) {
            return callback(err);
        }
        var
            articles = results.articles,
            website = results.website,
            last_publish_at = articles.length === 0 ? 0 : articles[0].publish_at,
            rss_header,
            rss_footer = '</channel></rss>';

        rss_header = '<?xml version="1.0"?>\n' +
                '<rss version="2.0"><channel><title><![CDATA[' +
                website.name +
                ']]></title><link>http://' +
                domain +
                '/</link><description><![CDATA[' +
                website.description +
                ']]></description><lastBuildDate>' +
                toRssDate(last_publish_at) +
                '</lastBuildDate><generator>iTranswarp.js</generator><ttl>3600</ttl>';

        if (articles.length === 0) {
            return callback(null, rss_header + rss_footer);
        }
        // find texts:
        async.parallelLimit(_.map(articles, function (a) {
            return function (callback) {
                Text.find(a.content_id, callback);
            };
        }), 5, function (err, texts) {
            if (err) {
                return callback(err);
            }
            var n = 0, L = [rss_header];
            _.each(articles, function (a) {
                var
                    url = 'http://' + domain + '/article/' + a.id,
                    content = utils.md2html((texts[n] && texts[n].value) || '');
                n++;
                L.push('<item><title><![CDATA[');
                L.push(a.name);
                L.push(']]></title><link>');
                L.push(url);
                L.push('</link><guid>');
                L.push(url);
                L.push('</guid><author><![CDATA[');
                L.push(a.user_name);
                L.push(']]></author><pubDate>');
                L.push(toRssDate(a.publish_at));
                L.push('</pubDate><description><![CDATA[');
                L.push(content);
                L.push(']]></description></item>');
            });
            L.push(rss_footer);
            callback(null, L.join(''));
        });
    });
Example #28
Check.methods.removeStats = function(callback) {
  var self = this;
  async.parallelLimit([
    function(cb) { CheckHourlyStat.remove({ check: self._id }, cb); },
    function(cb) { CheckDailyStat.remove({ check: self._id }, cb); },
    function(cb) { CheckMonthlyStat.remove({ check: self._id }, cb); },
    function(cb) { CheckYearlyStat.remove({ check: self._id }, cb); }
  ], 2, callback);
};
Example #29
exports.getSiteStats = function(callback) {

    function as(name, callback) {
        return function(err, results) {
            if (err)
                return callback(err);

            assert(results.rows.length === 1);
            callback(null, [name, results.rows[0]]);
        }
    }

    var tasks = [
        function(callback) {
            query('SELECT COUNT(*) FROM users', as('users', callback));
        },
        function (callback) {
            query('SELECT COUNT(*) FROM games', as('games', callback));
        },
        function(callback) {
            query('SELECT COALESCE(SUM(fundings.amount), 0) sum FROM fundings WHERE amount < 0', as('withdrawals', callback));
        },
        function(callback) {
            query('SELECT SUM(giveaways.amount) FROM giveaways', as('give_aways', callback));
        },
        function(callback) {
            query("SELECT COUNT(*) FROM games WHERE ended = false AND created < NOW() - interval '5 minutes'", as('unterminated_games', callback));
        },
        function(callback) {
            query('SELECT COUNT(*) FROM fundings WHERE amount < 0 AND bitcoin_withdrawal_txid IS NULL', as('pending_withdrawals', callback));
        },
        function(callback) {
            query('SELECT COALESCE(SUM(fundings.amount), 0) sum FROM fundings WHERE amount > 0', as('deposits', callback));
        },
        function(callback) {
            query('SELECT ' +
                'COUNT(*) count, ' +
                'SUM(plays.bet) total_bet, ' +
                'SUM(plays.cash_out) cashed_out, ' +
                'SUM(plays.bonus) bonused ' +
                'FROM plays INNER JOIN games ON games.id = plays.game_id', as('plays', callback));
        }
    ];

    async.parallelLimit(tasks, 3, function(err, results) {
       if (err) return callback(err);

       var data = {};

        results.forEach(function(entry) {
           data[entry[0]] = entry[1];
        });

        callback(null, data);
    });

};
module.exports = function linkDependencies(
  packages,
  packagesLoc,
  currentVersion,
  independent,
  callback
) {
  var completed = false;
  var tick = progressBar(packages.length);

  logger.log("info", "Linking all dependencies", true);

  async.parallelLimit(packages.map(function (root) {
    return function (done) {
      var tasks = [];
      var packageLoc = path.join(packagesLoc, root.folder);
      var nodeModulesLoc = path.join(packageLoc, "node_modules");

      tasks.push(function (done) {
        fsUtils.mkdirp(nodeModulesLoc, done);
      });

      tasks.push(function (done) {
        npmUtils.installInDir(packageLoc, done);
      });

      tasks.push(function (done) {
        linkDependenciesForPackage(
          root.pkg,
          packages,
          packagesLoc,
          nodeModulesLoc,
          currentVersion,
          independent,
          done
        );
      });

      tasks.push(function (done) {
        if (!completed) tick(root.name);
        done();
      });

      async.series(tasks, done);
    };
  }), 4, function (err) {
    tick.terminate();
    // don't display the ticker if we hit an error and we still have workers
    completed = true;
    if (err) {
      logger.log("error", "Errored while linking all dependencies", true, err);
    } else {
      logger.log("success", "Successfully linked all dependencies", true);
    }
    callback(err);
  });
}