var pairwiseGeocoderIterator = function(from1, from2, type) {
    var readStream = new stream.Readable({ objectMode: true });
    var iterators = [from1.geocoderDataIterator(type), from2.geocoderDataIterator(type)];
    var nexts = [null, null];

    var fetchq = queue();
    var nextq = queue(1);

    // fetch the next row from one side of the merge
    var advance = function(num) {
        fetchq.defer(function(cb) {
            iterators[num].asyncNext(function(err, row) {
                nexts[num] = row;
                cb(err);
            });
        });
    };
    advance(0);
    advance(1);

    readStream._read = function() {
        nextq.defer(function(cb) {
            fetchq.awaitAll(function(err) {
                if (err) return readStream.emit('error', err);

                // reset the fetch queue so we can call await on it again
                fetchq = queue();
                var out;
                if (nexts[0].done && nexts[1].done) {
                    // both sides are done
                    readStream.push(null);
                } else if (!nexts[0].done && (nexts[1].done || nexts[0].value.shard < nexts[1].value.shard)) {
                    // return and advance nexts[0]
                    out = nexts[0];
                    advance(0);
                    readStream.push({ shard: out.value.shard, data1: out.value.data, data2: undefined });
                } else if (!nexts[1].done && (nexts[0].done || nexts[1].value.shard < nexts[0].value.shard)) {
                    // return and advance nexts[1]
                    out = nexts[1];
                    advance(1);
                    readStream.push({ shard: out.value.shard, data2: out.value.data, data1: undefined });
                } else if (nexts[0].value.shard === nexts[1].value.shard) {
                    // shards match: return both and advance both sides
                    var out1 = nexts[0], out2 = nexts[1];
                    advance(0);
                    advance(1);
                    readStream.push({ shard: out1.value.shard, data1: out1.value.data, data2: out2.value.data });
                } else {
                    readStream.emit('error', new Error('merge error'));
                }
                cb();
            });
        });
    };

    return readStream;
};
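// A minimal consumption sketch for the merged stream above. `sourceA` and
// `sourceB` are hypothetical objects exposing geocoderDataIterator(); the
// shape of each pushed row ({ shard, data1, data2 }) comes from the function above.
var merged = pairwiseGeocoderIterator(sourceA, sourceB, 'feature');
merged.on('data', function(row) {
    // row.data1 / row.data2 is undefined when only one side has the shard
    console.log('shard %d: left=%s right=%s', row.shard, !!row.data1, !!row.data2);
});
merged.on('error', function(err) { console.error(err); });
merged.on('end', function() { console.log('merge complete'); });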
function idGeocode(geocoder, asId, options, callback) {
    var q = queue(5);
    var extid = asId.dbname + '.' + asId.id;
    var indexes = geocoder.bytype[asId.dbname];
    for (var i = 0; i < indexes.length; i++) {
        q.defer(function(source, id, done) {
            feature.getFeatureById(source, id, function(err, data) {
                if (err) return done(err);
                if (!data) return done();
                data.properties['carmen:extid'] = extid;
                done(null, data);
            });
        }, indexes[i], asId.id);
    }
    q.awaitAll(function(err, features) {
        if (err) return callback(err);
        var result = {
            'type': 'FeatureCollection',
            'query': [extid],
            'features': []
        };
        for (var i = 0; i < features.length; i++) {
            if (!features[i]) continue;
            var f = ops.toFeature([features[i]]);
            f.relevance = 1;
            result.features.push(f);
        }
        return callback(null, result);
    });
}
this.Before((scenario, callback) => {
    this.osrmLoader.setLoadMethod(this.DEFAULT_LOAD_METHOD);
    this.setGridSize(this.DEFAULT_GRID_SIZE);
    this.setOrigin(this.DEFAULT_ORIGIN);
    this.queryParams = {};
    this.extractArgs = '';
    this.contractArgs = '';
    this.partitionArgs = '';
    this.customizeArgs = '';
    // copy the defaults so per-scenario changes don't leak back into them
    this.environment = Object.assign({}, this.DEFAULT_ENVIRONMENT);
    this.resetOSM();

    this.scenarioID = this.getScenarioID(scenario);
    this.setupScenarioCache(this.scenarioID);

    // setup output logging
    let logDir = path.join(this.LOGS_PATH, this.featureID);
    this.scenarioLogFile = path.join(logDir, this.scenarioID) + '.log';
    d3.queue(1)
        .defer(mkdirp, logDir)
        .defer(rimraf, this.scenarioLogFile)
        .awaitAll(callback);

    // uncomment to get path to logfile
    // console.log(" Writing logging output to " + this.scenarioLogFile)
});
this.getOSRMHash = (callback) => {
    let dependencies = [
        this.OSRM_EXTRACT_PATH,
        this.OSRM_CONTRACT_PATH,
        this.LIB_OSRM_EXTRACT_PATH,
        this.LIB_OSRM_CONTRACT_PATH
    ];

    var addLuaFiles = (directory, callback) => {
        fs.readdir(path.normalize(directory), (err, files) => {
            if (err) return callback(err);

            var luaFiles = files.filter(f => !!f.match(/\.lua$/)).map(f => path.normalize(directory + '/' + f));
            Array.prototype.push.apply(dependencies, luaFiles);

            callback();
        });
    };

    // Note: we need a serialized queue here to ensure that the order of the files
    // passed is stable. Otherwise the hash will not be stable
    d3.queue(1)
        .defer(addLuaFiles, this.PROFILES_PATH)
        .defer(addLuaFiles, this.PROFILES_PATH + '/lib')
        .awaitAll(hash.hashOfFiles.bind(hash, dependencies, callback));
};
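// A minimal sketch (not the project's hash.hashOfFiles) of why file order must
// be stable: feeding the same bytes in a different order changes the digest.
// Uses only Node core modules; `paths` is assumed to already be in stable order.
const crypto = require('crypto');
const fs = require('fs');

function hashOfFilesSketch(paths, callback) {
    const h = crypto.createHash('md5');
    try {
        // d3.queue(1) above guarantees deterministic ordering before this point;
        // here a synchronous loop keeps the ordering explicit
        for (const p of paths) h.update(fs.readFileSync(p));
    } catch (err) {
        return callback(err);
    }
    callback(null, h.digest('hex'));
}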
function reduceBodyToImageObject(res, body, done) {
    if (res.statusCode < 200 || res.statusCode > 299) {
        done(new Error('Request failed. Status code: ' + res.statusCode));
        return;
    }
    var item = probable.pickFromArray(body.collection.items);
    var thumbnailURL = item.links[0].href;

    // Warning: This is brittle. The proper way would be to get the JSON
    // file at `data.href`, then get the image link from there.
    // ~orig is often too much memory, but worth trying if medium doesn't exist.
    var q = queue();
    q.defer(checkMIMEType, thumbnailURL.replace('~thumb', '~medium'));
    q.defer(checkMIMEType, thumbnailURL.replace('~thumb', '~orig'));
    q.await(sb(passResult, done));

    function passResult(mediumURL, origURL) {
        var result = {
            id: item.data[0].nasa_id,
            title: item.data[0].title,
            image: mediumURL ? mediumURL : origURL
        };
        done(null, result);
    }
}
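// An aside on the d3-queue semantics used above, assuming the same `queue`
// import: `await` (singular) spreads each deferred task's result into its own
// callback argument, which is why passResult receives (mediumURL, origURL):
// one result per defer, in defer order.
var demo = queue();
demo.defer(function(cb) { cb(null, 'first'); });
demo.defer(function(cb) { cb(null, 'second'); });
demo.await(function(err, a, b) {
    console.log(a, b); // "first second"
});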
_configure(function(e, r) {
    var ydq
    if (e) throw new Error(e)
    ydq = queue(1)
    years.forEach(function(year) {
        districts.forEach(function(district) {
            var o = Object.assign({}, opts)
            o.env = Object.assign({}, opts.env)
            o.env.RYEAR = year
            o.env.RDISTRICT = district
            //o.district = district
            o.env.CALVAD_PEMS_ROOT = config.calvad.vdspath
            o.env.R_CONFIG = config_file
            o.calvad = Object.assign({}, config.calvad)
            o.couchdb = config.couchdb
            o.env.CALVAD_FORCE_PLOT = redo_plot
            ydq.defer(year_district_handler, o, trigger_R_job, redo_plot)
            return null
        })
        return null
    })
    ydq.await(function() {
        // finished loading up all of the files into the file_queue, so
        // set the await on that
        console.log('ydq has drained')
        return null
    })
    return null
})
.awaitAll((err, res) => {
    if (err) return cb(err);
    // check if forw and backw returned the same values
    res.forEach((dirRes) => {
        var which = dirRes.which;
        delete dirRes.which;
        result[which] = dirRes;
    });

    result.bothw = {};

    var sq = d3.queue();

    var parseRes = (key, scb) => {
        if (result.forw[key] === result.backw[key]) {
            result.bothw[key] = result.forw[key];
        } else {
            result.bothw[key] = 'diff';
        }
        scb();
    };

    ['rate', 'status', 'time', 'distance', 'speed', 'mode'].forEach((key) => {
        sq.defer(parseRes, key);
    });

    sq.awaitAll((err) => {
        cb(err, result);
    });
});
handler: function (request, reply) {
    if (config.layers[request.params.table]) {
        if (config.layers[request.params.table].maxzoom >= parseInt(request.params.z, 10) &&
            config.layers[request.params.table].minzoom <= parseInt(request.params.z, 10)) {
            var bbox = sm.bbox(request.params.x, request.params.y, request.params.z);
            var vtile = new mapnik.VectorTile(
                parseInt(request.params.z, 10),
                parseInt(request.params.x, 10),
                parseInt(request.params.y, 10));
            var sql = formatSQL(
                config.layers['parcels'].table,
                config.layers['parcels'].geom_column,
                config.layers['parcels'].property_columns,
                bbox);
            //fetchGeoJSON(config.postgis, sql, vtile, reply, request);
            var q = d3.queue();
            q.defer(fetchGeoJSON, config.postgis, sql);
            q.await(function(error, GeoJSON) {
                if (error) return reply(error);
                if (typeof GeoJSON == 'object') {
                    vtile.addGeoJSON(JSON.stringify(GeoJSON), request.params.table);
                    zlib.gzip(vtile.getDataSync(), function(err, pbf) {
                        reply(pbf)
                            .header('Content-Type', 'application/x-protobuf')
                            .header('Content-Encoding', 'gzip')
                            .header('Cache-Control', config.cache);
                    });
                } else {
                    reply(GeoJSON);
                }
            });
        } else {
            reply('Tile rendering error: this layer does not serve tiles outside zoom levels ' +
                config.layers[request.params.table].minzoom + '-' +
                config.layers[request.params.table].maxzoom);
        }
    } else {
        reply('Tile rendering error: this layer has no configuration.');
    }
}
config_okay(config_file, function(err, c) {
    var qb
    if (err) {
        throw new Error('node.js needs a good croak module')
    }
    config = Object.assign(config, c)
    config.calvad.districts = [district]
    config.couchdb = Object.assign({}, c.couchdb)
    config.couchdb.testdb = 'test%2f' + test_db_unique
    config.couchdb.trackingdb = config.couchdb.testdb
    qb = queue(1)
    qb.defer(utils.demo_db_before(config))
    qb.defer(function(cb) {
        // dump a temporary config file
        fs.writeFile(config_file_2, JSON.stringify(config),
            { 'encoding': 'utf8'
            , 'mode': 0o600
            }, function(e) {
                should.not.exist(e)
                return cb(e)
            })
    })
    qb.await(function(e, r1, r2) {
        should.not.exist(e)
        return done()
    })
    return null
})
this.reprocessAndLoadData = (callback) => {
    let queue = d3.queue(1);
    queue.defer(this.writeAndLinkOSM.bind(this));
    queue.defer(this.extractAndContract.bind(this));
    queue.defer(this.osrmLoader.load.bind(this.osrmLoader), this.processedCacheFile);
    queue.awaitAll(callback);
};
it('should not crash on no work', function(done) {
    var task = { 'options': options
               , 'cell_id': '100_222'
               , 'year': 2008
               }
    var handler = routes.fractions_handler(hpmsgrids['2008'])
    queue()
        .defer(handler, task)
        .await(function(e, d) {
            should.not.exist(e)
            var len = Object.keys(task.accum).length
            len.should.equal(0)
            _.each(task.accum, function(v, k) {
                var totals = v.totals
                Object.keys(v).forEach(function(key) {
                    if (key === 'totals') return null
                    var record = v[key]
                    _.each(record, function(vv, kk) {
                        // totals should decrement down to zero
                        totals[kk] -= vv
                        return null
                    })
                    return null
                })
                _.each(totals, function(v) {
                    v.should.be.approximately(0, 0.01) // not exact
                    return null
                })
            })
            return done()
        })
    return null
})
var testSubMatching = (sub, si, scb) => {
    if (si >= subMatchings.length) {
        ok = false;
        q.abort();
        scb();
    } else {
        var sq = d3.queue();
        var testSubNode = (ni, ncb) => {
            var node = this.findNodeByName(sub[ni]),
                outNode = subMatchings[si][ni];
            if (this.FuzzyMatch.matchLocation(outNode, node)) {
                encodedResult += sub[ni];
                extendedTarget += sub[ni];
            } else {
                encodedResult += util.format('? [%s,%s]', outNode[0], outNode[1]);
                extendedTarget += util.format('%s [%d,%d]', sub[ni], node.lat, node.lon);
                ok = false;
            }
            ncb();
        };
        for (var i = 0; i < sub.length; i++) {
            sq.defer(testSubNode, i);
        }
        sq.awaitAll(scb);
    }
};
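// A minimal sketch of the q.abort() semantics relied on above (assumes the
// d3-queue module): a deferred task may return an object with an `abort`
// method; q.abort() invokes it for in-flight tasks and fails the queue, so
// the awaitAll callback receives an error.
function delayed(value, cb) {
    var timer = setTimeout(function() { cb(null, value); }, 1000);
    return { abort: function() { clearTimeout(timer); } };
}
var aq = d3.queue();
aq.defer(delayed, 1);
aq.defer(delayed, 2);
aq.awaitAll(function(err) {
    console.log(err ? 'aborted' : 'done'); // logs 'aborted'
});
aq.abort();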
this.Given(/^the node map$/, (docstring, callback) => {
    var q = d3.queue();

    var addNode = (name, ri, ci, cb) => {
        var lonLat = this.tableCoordToLonLat(ci, ri);
        if (name.match(/[a-z]/)) {
            if (this.nameNodeHash[name]) throw new Error(util.format('*** duplicate node %s', name));
            this.addOSMNode(name, lonLat[0], lonLat[1], null);
        } else if (name.match(/[0-9]/)) {
            if (this.locationHash[name]) throw new Error(util.format('*** duplicate node %s', name));
            this.addLocation(name, lonLat[0], lonLat[1], null);
        }
        cb();
    };

    docstring.split(/\n/).forEach((row, ri) => {
        row.split('').forEach((cell, ci) => {
            if (cell.match(/[a-z0-9]/)) {
                q.defer(addNode, cell, ri, ci * 0.5);
            }
        });
    });

    q.awaitAll(callback);
});
createPreamble: function(context) {
    var preamble = d3.queue();

    if (!context.template) {
        preamble.defer((next) => next(new template.NotFoundError('No template passed')));
    } else if (typeof context.template === 'string') {
        preamble.defer(
            template.read,
            path.resolve(context.template),
            context.overrides.templateOptions
        );
    } else {
        // we assume if template is not string, it's a pre-loaded template body object
        preamble.defer((next) => next(null, context.template));
    }

    preamble.defer(lookup.configurations, context.baseName, context.configBucket, context.stackRegion);

    preamble.await(function(err, templateBody, configs) {
        if (err) {
            var msg = '';
            if (err instanceof template.NotFoundError) msg += 'Could not load template: ';
            if (err instanceof template.InvalidTemplateError) msg += 'Could not parse template: ';
            if (err instanceof lookup.BucketNotFoundError) msg += 'Could not find config bucket: ';
            if (err instanceof lookup.S3Error) msg += 'Could not load saved configurations: ';
            msg += err.message;
            err.message = msg;
            return context.abort(err);
        }

        context.newTemplate = templateBody;
        context.configNames = configs;
        context.next();
    });
},
, function(req, res, next) {
    var task = task_init(req)
    task.options = config
    queue()
        .defer(fractions_handler(hpmsgrids[task.year]), task)
        .await(function(e2, t) {
            var memo
            if (e2) return next(e2)
            memo = reduce.reduce({}, task)
            if (csv_test.test(req.params.format)) {
                // respond with csv
                res.writeHead(200, { 'Content-Type': 'text/csv' })
                stringify(arrayifier(memo), function(e3, arr) {
                    if (e3) throw new Error(e3)
                    res.end(arr)
                })
                return null
            } else {
                return res.json(memo)
            }
        })
    return null
})
/**
 * fractions_handler_one_hour
 *
 * Given an hpmsgrids object for a particular year, handle a task to apply
 * fractions for all grids. This will go get the fractions for the
 * specified hour in the year (for both detector cells and hpms-only
 * cells), and then, for each grid cell, will multiply the hourly
 * fraction by the AADT for that grid cell.
 *
 * @param {Object} hpmsgrids - an object holding AADT values for each grid cell
 * @param {Object} task - an object holding the date to process
 * @param {Function} cb - completion callback; invoked as cb(err) or cb(null, task)
 * @returns {undefined}
 */
function fractions_handler_one_hour(hpmsgrids, task, cb) {
    queue(2)
        .defer(get_detector_fractions_one_hour, task)
        .defer(get_hpms_fractions_one_hour, task)
        .await(function(e, t1, t2) {
            if (e) {
                console.log(e)
                croak()
                return cb(e)
            }
            post_process_couch_query_one_hour(
                task
                , function(ee) {
                    if (ee) {
                        console.log(ee)
                        croak()
                        return cb(ee)
                    }
                    task.aadt_store = hpmsgrids
                    reduce.apply_fractions_one_hour(task, function(eee) {
                        if (eee) return cb(eee)
                        return cb(null, task)
                    })
                    return null
                })
            return null
        })
    return null
}
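// Illustrative arithmetic for the doc comment above (hypothetical numbers):
var aadt = 24000              // vehicles per day for one grid cell
var hourly_fraction = 0.05    // that hour's share of the daily traffic
var hourly_volume = aadt * hourly_fraction // 1200 vehicles in that hour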
module.exports = function(geocoder, position, options, callback) {
    options = options || {};
    var context = [];
    var indexes = geocoder.indexes;
    var index_ids = Object.keys(indexes);
    var maxidx = typeof options.maxidx === 'number' ? options.maxidx : index_ids.length;
    var full = options.full || false;
    var matched = options.matched || {};
    var language = options.language || false;
    var subtypeLookup = getSubtypeLookup(options.types || []);

    index_ids = index_ids.slice(0, maxidx);

    // No-op context.
    if (!index_ids.length) return callback(null, context);

    var q = queue();
    var lon = position[0];
    var lat = position[1];

    for (var index_ids_it = 0; index_ids_it < index_ids.length; index_ids_it++) {
        var source = indexes[index_ids[index_ids_it]];
        var bounds = source.bounds;
        if (lat >= bounds[1] && lat <= bounds[3] && lon >= bounds[0] && lon <= bounds[2]) {

            // calculate score range for this index, if:
            // - context call is associated w/ a reverse geocode
            // - we are filtering on the parent type (eg poi)
            // - there is a scorerange entry on this index for the child type (eg landmark)
            var scoreRange = false;
            if (options.full &&
                subtypeLookup[source.type] &&
                source.scoreranges[subtypeLookup[source.type]])
                scoreRange = [
                    source.scoreranges[subtypeLookup[source.type]][0] * source.maxscore,
                    source.scoreranges[subtypeLookup[source.type]][1] * source.maxscore
                ];

            // targetFeature = look for specific feature as top-most
            // * lower-level indexes must still be queried for context
            //   but should not be told to look for the target feature
            // * indexes near the top which have the same type
            //   as but are not the target feature index should be skipped
            var exclusiveMatched = false;
            if (options.targetFeature && (source.type === indexes[index_ids[index_ids.length - 1]].type)) {
                // if we have a target feature, only query the index containing it + its parents
                if (source.id !== options.targetFeature[0]) continue;
                exclusiveMatched = { _exclusive: true };
                exclusiveMatched[options.targetFeature[1]] = true;
            }

            q.defer(contextVector, source, lon, lat, full, exclusiveMatched || matched, language, scoreRange);
        }
    }

    q.awaitAll(function(err, loaded) {
        if (err) return callback(err);
        return callback(null, stackFeatures(geocoder, loaded, options));
    });
};
/**
 * @inheritdoc
 */
defaults () {
    return {
        app: null,
        source: '/path/to/images',
        temp: '/path/to/writable/directory',
        queue: d3.queue(64),
    };
}
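// A brief sketch of what the concurrency argument above controls: d3.queue(n)
// runs at most n deferred tasks at once. The names below are illustrative.
var limited = d3.queue(2); // at most two tasks in flight
function slowTask(id, cb) {
    setTimeout(function() { cb(null, id); }, 100);
}
for (var i = 0; i < 6; i++) limited.defer(slowTask, i);
limited.awaitAll(function(err, ids) {
    console.log(ids); // [0, 1, 2, 3, 4, 5], in defer order
});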
, function(done) {
    function file_worker(f, cb) { return cb(null, f) }
    var fq = queue()
    var task = { path: rootdir + '/test/files/grid/hourly/2009' }
    populate_files(fq, file_worker, rootdir, 'test/files', [2009, 2012], 'grid', 'hourly', { couchdb: 'blahblah' },
        function(e, r) {
            should.not.exist(e)
            fq.awaitAll(function(e, results) {
                results.sort(function(a, b) {
                    return a.file < b.file ? -1 : 1
                })
                results.length.should.eql(4)
                results[0].should.eql({
                    file: rootdir + '/test/files/hourly/2009/100/263.json'
                    , year: 2009
                    , grid: { 'i_cell': 100, 'j_cell': 263 }
                    , i: 100
                    , j: 263
                    , options: { 'couchdb': 'blahblah' }
                })
                results[1].should.eql({
                    file: rootdir + '/test/files/hourly/2009/133/154.json'
                    , year: 2009
                    , grid: { 'i_cell': 133, 'j_cell': 154 }
                    , i: 133
                    , j: 154
                    , options: { 'couchdb': 'blahblah' }
                })
                results[2].should.eql({
                    file: rootdir + '/test/files/hourly/2012/231/55.json'
                    , year: 2012
                    , grid: { 'i_cell': 231, 'j_cell': 55 }
                    , i: 231
                    , j: 55
                    , options: { 'couchdb': 'blahblah' }
                })
                results[3].should.eql({
                    file: rootdir + '/test/files/hourly/2012/300/250.json'
                    , year: 2012
                    , grid: { 'i_cell': 300, 'j_cell': 250 }
                    , i: 300
                    , j: 250
                    , options: { 'couchdb': 'blahblah' }
                })
                return done()
            })
        })
    return null
})
this.When(/^I route (\d+) times I should get$/, { timeout: 30000 }, (n, table, callback) => {
    var q = d3.queue(1);
    for (var i = 0; i < n; i++) {
        q.defer(this.WhenIRouteIShouldGet, table);
    }
    q.awaitAll(callback);
});
function() {
    var q = d3.queue();
    for (var i = 0; i < targetPrjs.length; i++) {
        q.defer(getResults, targetPrjs[i]);
    }
    q.awaitAll(this);
}
tape('build queued features', (t) => {
    const q = queue();
    Object.keys(conf).forEach((c) => {
        q.defer((cb) => {
            buildQueued(conf[c], cb);
        });
    });
    q.awaitAll(t.end);
});
jimp.read(loaderContext.resourcePath, function(err, img) {
    if (err) {
        return loaderCallback(err);
    }

    function resizeImage(width, queueCallback) {
        img
            .clone()
            .resize(width, jimp.AUTO)
            .quality(quality)
            .background(background)
            .getBuffer(mime, function(err, buf) {
                if (err) {
                    return queueCallback(err);
                }
                var fileName = loaderUtils.interpolateName(loaderContext, name + ext, { content: buf })
                    .replace(/\[width\]/ig, width);
                loaderContext.emitFile(fileName, buf);
                queueCallback(null, {
                    src: '__webpack_public_path__ + ' + JSON.stringify(fileName + ' ' + width + 'w'),
                    path: '__webpack_public_path__ + ' + JSON.stringify(fileName),
                    width: width
                });
            });
    }

    var q = queue();
    var widthsToGenerate = new Set();

    (Array.isArray(sizes) ? sizes : [sizes]).forEach(function(size) {
        var width = Math.min(img.bitmap.width, parseInt(size, 10));

        // Only resize images if they aren't an exact copy of one already being resized...
        if (!widthsToGenerate.has(width)) {
            widthsToGenerate.add(width);
            q.defer(resizeImage, width);
        }
    });

    q.awaitAll(function(err, files) {
        if (err) {
            return loaderCallback(err);
        }
        var srcset = files.map(function(f) { return f.src; }).join('+","+');
        var images = files.map(function(f) { return '{path:' + f.path + ',width:' + f.width + '}'; }).join(',');
        var firstImagePath = files[0].path;
        loaderCallback(null,
            'module.exports = {srcSet:' + srcset + ',images:[' + images + '],src:' + firstImagePath +
            ',toString:function(){return ' + firstImagePath + '}};');
    });
});
test('add more records', function(t) {
    var q = queue();
    for (var i = 0; i < 3; i++) {
        q.defer(kinesis.putRecord.bind(kinesis), {
            Data: 'hello' + i,
            PartitionKey: 'a' + i,
            StreamName: 'teststream'
        });
    }
    q.awaitAll(function(err) {
        t.error(err);
        t.end();
    });
});
q.awaitAll((err, results) => {
    if (err) return callback(err);
    let q = d3.queue();
    results.forEach(r => {
        // remove any cached directory that doesn't match the current hash
        if (r.stat.isDirectory() && r.file.search(osrmHash) < 0) {
            q.defer(rimraf, r.file);
        }
    });
    q.awaitAll(callback);
});
var getVectorTileFeatures = function(tiles, layers, callback) {
    var queue = d3.queue(5);

    tiles.forEach(function(tile) {
        queue.defer(attempt_retrieveTileData, layers, tile[0], tile[1], tile[2]);
    });

    queue.awaitAll(function(err, results) {
        if (err) return callback(err, null);
        return callback(null, results);
    });
};
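// A minimal sketch of the defer-with-arguments pattern used above: arguments
// after the task function are prepended to its parameter list, and d3-queue
// supplies the trailing callback. `fetchTile` is hypothetical.
function fetchTile(layers, z, x, y, cb) {
    // pretend to fetch, then hand back a result
    cb(null, { z: z, x: x, y: y, layers: layers });
}
var tq = d3.queue(5); // at most five tile fetches in flight
tq.defer(fetchTile, ['parcels'], 14, 2620, 6331);
tq.awaitAll(function(err, tiles) {
    // results arrive in defer order regardless of completion order
});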
this.processRowsAndDiff = (table, fn, callback) => {
    var q = d3.queue(1);

    table.hashes().forEach((row, i) => {
        q.defer(fn, row, i);
    });

    q.awaitAll((err, actual) => {
        if (err) return callback(err);
        this.diffTables(table, actual, {}, callback);
    });
};
exec(cmd, (err) => {
    if (err) {
        this.log(util.format('*** Exited with code %d', err.code), 'preprocess');
        process.chdir('../');
        return callback(this.ContractError(err.code, util.format('osrm-contract exited with code %d', err.code)));
    }

    var rename = (file, cb) => {
        this.log(util.format('Renaming %s.%s to %s.%s', this.osmData.extractedFile, file, this.osmData.contractedFile, file), 'preprocess');
        fs.rename([this.osmData.extractedFile, file].join('.'), [this.osmData.contractedFile, file].join('.'), (err) => {
            if (err) return cb(this.FileError(null, 'failed to rename data file after contracting.'));
            cb();
        });
    };

    var renameIfExists = (file, cb) => {
        // fs.stat yields (err, stats); a truthy stats object means the file exists
        fs.stat([this.osmData.extractedFile, file].join('.'), (doesNotExistErr, exists) => {
            if (exists) rename(file, cb);
            else cb();
        });
    };

    var copy = (file, cb) => {
        this.log(util.format('Copying %s.%s to %s.%s', this.osmData.extractedFile, file, this.osmData.contractedFile, file), 'preprocess');
        fs.createReadStream([this.osmData.extractedFile, file].join('.'))
            .pipe(
                fs.createWriteStream([this.osmData.contractedFile, file].join('.'))
                    .on('finish', cb)
            )
            .on('error', () => {
                return cb(this.FileError(null, 'failed to copy data after contracting.'));
            });
    };

    var q = d3.queue();

    ['osrm', 'osrm.core', 'osrm.datasource_indexes', 'osrm.datasource_names', 'osrm.ebg', 'osrm.edges',
     'osrm.enw', 'osrm.fileIndex', 'osrm.geometry', 'osrm.hsgr', 'osrm.icd', 'osrm.level', 'osrm.names',
     'osrm.nodes', 'osrm.properties', 'osrm.ramIndex', 'osrm.restrictions'].forEach((file) => {
        q.defer(rename, file);
    });

    ['osrm.edge_penalties', 'osrm.edge_segment_lookup'].forEach(file => {
        q.defer(renameIfExists, file);
    });

    // no files are copied at the moment; kept for symmetry with rename/renameIfExists
    [].forEach((file) => {
        q.defer(copy, file);
    });

    q.awaitAll((err) => {
        this.log('Finished contracting ' + this.osmData.contractedFile, 'preprocess');
        process.chdir('../');
        callback(err);
    });
});
this.extractAndContract = (callback) => {
    // a shallow copy of scenario parameters to avoid data inconsistency
    // if a cucumber timeout occurs during deferred jobs
    let p = {
        extractArgs: this.extractArgs,
        contractArgs: this.contractArgs,
        profileFile: this.profileFile,
        inputCacheFile: this.inputCacheFile,
        processedCacheFile: this.processedCacheFile,
        environment: this.environment
    };
    let queue = d3.queue(1);
    queue.defer(this.extractData.bind(this), p);
    queue.defer(this.contractData.bind(this), p);
    queue.awaitAll(callback);
};