static push(file, callback) { let file_path = null; let owned = false; const queue = new Queue(1); // ensure there is a file on disk queue.defer(function(callback) { if (!file.pipe) return callback(null, (file_path = file)); if (fs.existsSync(file_path = file.path)) return callback(); // use if exists on disk callback = _.once(callback); file_path = randomFilename(); owned = true; file.pipe(fs.createWriteStream(file_path)) .on('finish', callback) .on('error', callback); }); // run push command queue.defer(function(callback) { if (runCommand('push', [file_path, '-Source', 'nuget.org', '-NonInteractive']).code !== 0) { return callback(new Error(`Failed to push file: ${file.path}`)); } return callback(); }); // clean up temp file if needed return queue.await(function(err) { if (file_path && owned && fs.existsSync(file_path)) { fs.unlinkSync(file_path); } return callback(err); }); }
// For each HTTP method, defer a check that the admin user is authorized:
// the auth function must succeed (no error) and grant access (`ok` truthy).
_.forEach(methods, (method) => {
  queue.defer((cb) => {
    const user = models.adminUser;
    const req = {
      user,
      method,
      query: {},
      body: {},
    };
    authFn({user, req}, (err, ok) => {
      expect(err).toNotExist();
      expect(ok).toExist();
      cb();
    });
  });
});
// Merge a GeoJSON feature into every vector tile it intersects.
// For each z/x/y covering `feat`: read the existing tile from `source` (if
// any), append `feat` to its features, re-encode gzip'd, and write it back.
// Calls `done(err)` after all tiles are processed.
function vectorize(feat, source, done) {
  var q = new queue();
  var zxys = zxyArray(feat);
  for (var i = 0; i < zxys.length; i++) q.defer(function(zxy, done) {
    source.getTile(zxy[0], zxy[1], zxy[2], function(err, res) {
      if (err) {
        // Treat a read error as "tile does not exist yet" — start fresh.
        // NOTE(review): this also swallows genuine I/O errors; confirm intended.
        addData();
      } else {
        var tmpTile = new mapnik.VectorTile(zxy[0], zxy[1], zxy[2]);
        tmpTile.setData(res, function(err) {
          if (err) {
            // Undecodable tile data — fall back to a fresh tile.
            addData();
          } else {
            addData(tmpTile);
          }
        });
      }
    });
    // Build the output tile: existing features (if any) plus `feat`,
    // then serialize with gzip and write back to the source.
    function addData(currentTile) {
      // BUG FIX: `feats` was previously assigned without declaration in the
      // else-branch, creating an implicit global shared (and raced) across
      // all concurrently-deferred tiles.
      var feats;
      if (currentTile) {
        feats = JSON.parse(currentTile.toGeoJSONSync('data')).features;
        feats.push(feat);
      } else {
        feats = [feat];
      }
      var vtile = new mapnik.VectorTile(zxy[0], zxy[1], zxy[2]);
      vtile.addGeoJSON(JSON.stringify({
        type: 'FeatureCollection',
        features: feats
      }, null, 2), 'data');
      vtile.getData({compression: 'gzip'}, function(err, buffer) {
        if (err) return done(err);
        source.putTile(zxy[0], zxy[1], zxy[2], buffer, function(err) {
          done(err);
        });
      });
    }
  }, zxys[i]);
  q.awaitAll(function(err) {
    return done(err);
  });
}
// For each HTTP method, defer a check that the teacher user is denied:
// the auth function must succeed (no error) but withhold access (`ok` falsy).
_.forEach(methods, (method) => {
  queue.defer((cb) => {
    const user = models.teacherUser;
    const req = {
      user,
      method,
      params: {id: modelId},
      query: {},
      body: {},
    };
    authFn({user, req}, (err, ok) => {
      expect(err).toNotExist();
      expect(ok).toNotExist();
      cb();
    });
  });
});
// Enqueue each input file for processing on the (serial) file queue.
files.forEach((f) => {
  fq.defer(process_file, f);
  return null;
});
// Integration test driver: load two bluetooth log files into PostgreSQL via
// COPY FROM, run the perl-hash parsing pipeline, then assert row counts and
// schemas across the resulting tables.
prepare_temp_tables(client, function () {
  var copy_statement = bt_parser.copy_statement(config.postgresql.table)
  var files = ["test/bluetooth_log-2014-07-28-21\:00\:00.048" //bit file1210 records
              ,"test/bluetoothdump" // small file, 50 records
              ]
  // Serial queue: the perlhash table must exist before any file is processed.
  var fq = new queue(1)
  fq.defer(function(cb){
    // Create the scratch 'perlhash' table used by the perl-write stage below.
    var create_statement = bt_parser.create_perlhash_statement('perlhash')
    client.query(create_statement,function(e,r){
      //console.log(e)
      should.not.exist(e)
      return cb(e)
    })
  })
  // Stream one log file into the main table, then run the perlhash
  // write -> parse -> truncate chain; `callback` fires when truncate is done.
  function process_file(file,callback){
    var _reader = rw.fileReader(file)
    var parser_instance
    var writer = client.copyFrom( copy_statement );
    writer.on('error', function (error) {
      // A COPY FROM stream error is a hard test failure.
      console.log("Sorry, error happens", error);
      throw new Error("COPY FROM stream should not emit errors" + JSON.stringify(error))
    });
    writer.on('close',function(error){
      //console.log("Data inserted successfully");
      should.not.exist(error)
      // Main-table copy finished; now stream the perl-hash rows.
      var copy_statement_perl = bt_parser.copy_perlhash_statement('perlhash')
      var perlwriter = client.copyFrom( copy_statement_perl );
      parser_instance.perl_write(perlwriter)
      perlwriter.on('close',function(err){
        // NOTE(review): `err` is not checked here — presumably a copy error
        // would already have surfaced; confirm.
        parser_instance.perl_parser(client,function(e){
          // Truncate the scratch table so the next file starts clean.
          parser_instance.perl_truncate(client,callback)
          return null
        })
      })
    })
    // Wire the parser to the reader/writer; must happen after the handlers
    // above are registered so no events are missed.
    parser_instance=bt_parser(_reader,writer)
  }
  files.forEach(function(f){
    fq.defer(process_file,f)
    return null
  })
  // All files loaded — run the database assertions (up to 5 in parallel).
  fq.awaitAll(function(error, results) {
    var q = queue(5);
    // setup tests
    var tasks=[]
    tasks.push(function(callback){
      // Main table: both files' rows present with the full column set.
      client.query('select * from '+config.postgresql.table,function(e,d){
        should.not.exist(e)
        should.exist(d)
        d.should.have.property('rows').with.lengthOf (1210 + 50)
        //console.log(d.rows.length)
        d.rows.forEach(function(row,i){
          row.should.have.keys( 'id'
                              ,'ts'
                              ,'radar_lane_id'
                              ,'station_lane_id'
                              ,'name'
                              ,'route'
                              ,'direction'
                              ,'postmile'
                              ,'enabled'
                              ,'firmware'
                              ,'sample_interval'
                              ,'lastpolltime'
                              ,'lastgoodpoll'
                              ,'speed'
                              ,'speed_units'
                              )
        })
        return callback()
      })
    })
    tasks.push(function(callback){
      // Scratch table: emptied by perl_truncate after each file.
      client.query('select * from perlhash',function(e,d){
        should.not.exist(e)
        should.exist(d)
        d.should.have.property('rows').with.lengthOf(0) //because I called truncate
        //console.log(d.rows.length)
        return callback()
      })
    })
    tasks.push(function(callback){
      client.query('select * from bt_xml_project',function(e,d){
        should.not.exist(e)
        should.exist(d)
        d.should.have.property('rows').with.lengthOf(1)
        return callback()
      })
    })
    tasks.push(function(callback){
      client.query('select * from bt_xml_location',function(e,d){
        should.not.exist(e)
        should.exist(d)
        d.should.have.property('rows').with.lengthOf(5)
        return callback()
      })
    })
    tasks.push(function(callback){
      client.query('select * from bt_xml_segment',function(e,d){
        should.not.exist(e)
        should.exist(d)
        d.should.have.property('rows').with.lengthOf(8)
        return callback()
      })
    })
    tasks.push(function(callback){
      // Observations: one row per input record across both files.
      client.query('select * from bt_xml_observation',function(e,d){
        should.not.exist(e)
        should.exist(d)
        d.should.have.property('rows').with.lengthOf(1210+50)
        return callback()
      })
    })
    tasks.forEach(function(t) {
      q.defer(t);
    });
    q.awaitAll(function(error, results) {
      //console.log("all done with db checks")
      // Release the pooled client and end the test.
      client_done()
      pg.end()
      return done()
    })
  })
  // open a reader
  return null
})
// Suite setup: reset the database, then scaffold fixtures, merging the
// scaffolded records into the shared `models` object for later tests.
before((callback) => {
  const queue = new Queue(1);
  // Inner callbacks renamed `cb` to avoid shadowing the hook's `callback`.
  queue.defer((cb) => resetModels(cb));
  queue.defer((cb) => {
    scaffold((err, _models) => cb(err, _.extend(models, _models)));
  });
  queue.await(callback);
});