function runQueries(){ console.log("Querying for job id: " + job_id) Job.findById(job_id).populate("_owner").exec(this.parallel()) console.log("Querying for last 20 jobs for " + repo_url) Job.find() .sort({'finished_timestamp': -1}) .where('finished_timestamp').ne(null) .where('repo_url',repo_url) .where('type').in(['TEST_ONLY','TEST_AND_DEPLOY']) .limit(20) .populate("_owner") .exec(this.parallel()) },
"Job emits 'scheduled' event with 'run at' Date": function(test) { test.expect(1); var date = new Date(Date.now() + 3000); var job = new schedule.Job(function() { test.done(); }); job.on('scheduled', function(runAtDate) { test.equal(runAtDate, date); }); job.schedule(date); clock.tick(3250); }
"Runs job once at some date": function(test) { test.expect(1); var job = new schedule.Job(function() { test.ok(true); }); job.schedule(new Date(Date.now() + 3000)); setTimeout(function() { test.done(); }, 3250); clock.tick(3250); },
"Run job on specified date": function(test) { test.expect(1); var job = new schedule.Job(function() { test.ok(true); }); job.runOnDate(new Date(Date.now() + 3000)); setTimeout(function() { test.done(); }, 3250); clock.tick(3250); },
"Cancel next job before it runs": function(test) { test.expect(1); var job = new schedule.Job(function() { test.ok(true); }); job.schedule(new Date(Date.now() + 1500)); job.schedule(new Date(Date.now() + 3000)); job.cancelNext(); setTimeout(function() { test.done(); }, 3250); clock.tick(3250); },
"Job emits 'run' event": function(test) { test.expect(1); var job = new schedule.Job(function() {}); job.on('run', function() { test.ok(true); }); job.schedule(new Date(Date.now() + 3000)); setTimeout(function() { test.done(); }, 3250); clock.tick(3250); }
function runQueries(err, repo_list){ this.repo_list = repo_list; console.debug("Querying for last 100 jobs across the system"); if (req.param("limit_by_user")) { Job.find() .sort({'finished_timestamp': -1}) .where("_owner",req.param("limit_by_user")) .populate("_owner") .limit(100) .exec(this); } else { Job.find() .sort({'finished_timestamp': -1}) .populate("_owner") .limit(100) .exec(this); } } ,
'no startTime , endTime greater than now': function (test) { test.expect(2); var job = new schedule.Job(function () { test.ok(true); }); job.schedule({ end: new Date(Date.now() + 2000), rule: '*/1 * * * * *' }); setTimeout(function () { test.done(); }, 3250); clock.tick(3250); },
'no endTime , startTime less than now': function (test) { test.expect(3); var job = new schedule.Job(function () { test.ok(true); }); job.schedule({ start: new Date(Date.now() - 2000), rule: '*/1 * * * * *' }); setTimeout(function () { test.done(); }, 3250); clock.tick(3250); },
"Runs job at interval based on object, repeating indefinitely": function(test) { test.expect(3); var job = new schedule.Job(function() { test.ok(true); }); job.schedule({ second: null // fire every second }); setTimeout(function() { job.cancel(); test.done(); }, 3250); clock.tick(3250); },
'no startTime , endTime less than now': function (test) { test.expect(0); var job = new schedule.Job(function () { test.ok(true); }); job.schedule({ end: new Date(Date.now() - 2000), rule: { second: null } }); setTimeout(function () { test.done(); }, 3250); clock.tick(3250); },
'has startTime and endTime': function (test) { test.expect(1); var job = new schedule.Job(function () { test.ok(true); }); job.schedule({ start: new Date(Date.now() + 1000), end: new Date(Date.now() + 2000), rule: '*/1 * * * * *' }); setTimeout(function () { test.done(); }, 3250); clock.tick(3250); }
"Runs job at interval based on recur rule, repeating indefinitely": function(test) { test.expect(3); var job = new schedule.Job(function() { test.ok(true); }); var rule = new schedule.RecurrenceRule(); rule.second = null; // fire every second job.schedule(rule); setTimeout(function() { job.cancel(); test.done(); }, 3250); clock.tick(3250); },
// For every configured repository, look up its most recent finished,
// non-archived test/deploy job; each lookup reports through group().
_.each(this.repo_list, function(repo) {
  Job.findOne()
    .sort({'finished_timestamp': -1})
    .where('type').in(['TEST_ONLY','TEST_AND_DEPLOY'])
    .where('finished_timestamp').ne(null)
    .where('archived_timestamp', null)
    .where('repo_url', repo.url)
    .populate("_owner")
    .lean(true)
    .exec(group());
});
'no startTime , endTime greater than now': function (test) { test.expect(2); var job = new schedule.Job(function () { test.ok(true); }); var rule = new schedule.RecurrenceRule(); rule.second = null; // every second job.schedule({ end: new Date(Date.now() + 2000), rule: rule }); setTimeout(function () { test.done(); }, 3250); clock.tick(3250); },
"Prevents all future invocations": function(test) { test.expect(1); var job = new schedule.Job(function() { test.ok(true); }); job.schedule({ second: null // fire every second }); setTimeout(function() { job.cancel(); }, 1250); setTimeout(function() { test.done(); }, 2250); clock.tick(2250); },
"Can cancel Jobs scheduled with Job#schedule": function(test) { test.expect(2); var job = new schedule.Job(function() { test.ok(true); }); job.schedule({ second: null }); setTimeout(function() { schedule.cancelJob(job); }, 2250); setTimeout(function() { test.done(); }, 3250); clock.tick(3250); },
function runQueries(err, repo_config){ if (err || !repo_config) { res.statusCode = 500; res.end("you must configure " + repo_url + " before you can use it"); return; } this.repo_config = repo_config; console.log("Querying for job id: " + job_id); Job.findById(job_id).populate("_owner").lean(true).exec(this.parallel()); console.log("Querying for last 20 jobs for " + repo_url); Job.find() .sort({'finished_timestamp': -1}) .where('finished_timestamp').ne(null) .where('repo_url',this.repo_config.url) .where('archived_timestamp', null) .where('type').in(['TEST_ONLY','TEST_AND_DEPLOY']) .limit(20) .populate("_owner") .lean(true) .exec(this.parallel()); },
"Job emits 'scheduled' event for every next invocation": function(test) { // Job will run 3 times but be scheduled 4 times, 4th run never happens // due to cancel. test.expect(4); var job = new schedule.Job(function() {}); job.on('scheduled', function(runOnDate) { test.ok(true); }); job.schedule({ second: null // Fire every second }); setTimeout(function() { job.cancel(); test.done(); }, 3250); clock.tick(3250); }
"Job emits 'canceled' event": function(test) { test.expect(1); var job = new schedule.Job(function() {}); job.on('canceled', function() { test.ok(true); }); job.schedule({ second: null // fire every second }); setTimeout(function() { job.cancel(); }, 1250); setTimeout(function() { test.done(); }, 2250); clock.tick(2250); }
exports.badge = function(req, res) { res.statusCode = 200; var user = req.params.user; var org = req.params.org; var repo = req.params.repo; var repo_url = "https://github.com/" + org + "/" + repo; function sendBadge(name) { res.setHeader("Cache-Control", "no-cache"); return res.redirect('/images/badges/build_' + name + '.png'); } // Ignore if can't parse as ObjectID try { user = new mongoose.Types.ObjectId(user); } catch(e) { console.debug('[badge] invalid user ObjectID', user); return sendBadge('unknown'); } Job.findOne() .sort({'finished_timestamp': -1}) .where('finished_timestamp').ne(null) .where('archived_timestamp', null) // FIXME: is it always lowercase? .where('repo_url', repo_url.toLowerCase()) .where('_owner', user) .where('type').in(['TEST_ONLY','TEST_AND_DEPLOY']) .exec(function(err, job) { if (err || !job) { if (err) { console.debug('[badge] error looking for latest build', err.message); } return sendBadge('unknown'); } if (job.test_exitcode === 0) return sendBadge('passing'); return sendBadge('failing'); }); };
// Render the "latest build" page for a configured repository: fetch its 20
// most recent finished, non-archived test/deploy jobs, decorate each with
// display-only fields, and render the newest one in detail.
// NOTE(review): relies on outer-scope repo_url, org, repo, res, humane,
// crypto, and filter — confirm against the enclosing handler.
lookup(repo_url, function(err, repo_config) {
  if (err || repo_config === undefined) {
    res.statusCode = 500;
    res.end("you must configure " + repo_url + " before you can use it");
    return;
  }
  Job.find()
    .sort({'finished_timestamp': -1})
    .where('finished_timestamp').ne(null)
    .where('archived_timestamp', null)
    .where('repo_url', repo_config.url)
    .where('type').in(['TEST_ONLY','TEST_AND_DEPLOY'])
    .limit(20)
    .lean(true)
    .populate("_owner")
    .exec(function(err,results) {
      if (err) throw err;
      _.each(results, function(job) {
        // Display-only fields: whole-second duration and a human-friendly
        // finish time.
        job.duration = Math.round((job.finished_timestamp - job.created_timestamp)/1000);
        job.finished_at = humane.humaneDate(job.finished_timestamp);
        job.triggered_by_commit = false;
        if (job.github_commit_info !== undefined && job.github_commit_info.id !== undefined) {
          job.triggered_by_commit = true;
          // Gravatar identicon keyed on the MD5 of the committer's email.
          job.gravatar_url = 'https://secure.gravatar.com/avatar/' + crypto.createHash('md5').update(job.github_commit_info.author.email).digest("hex") + '.jpg?' + 'd=' + encodeURIComponent('identicon');
          // Prefer the GitHub username; fall back to the author's name.
          // (loose != deliberately matches both undefined and null here)
          if (job.github_commit_info.author.username != undefined) {
            job.committer = job.github_commit_info.author.username;
            job.committer_is_username = true;
          } else {
            job.committer = job.github_commit_info.author.name;
            job.committer_is_username = false;
          }
        }
        job.id = job._id.toString();
        job.url = "/" + org + "/" + repo + "/job/" + job.id;
      });
      if (results.length === 0) {
        return res.end('no jobs for this build');
      }
      var triggered_by_commit = false;
      if (results[0].github_commit_info !== undefined && results[0].github_commit_info.id !== undefined) {
        triggered_by_commit = true;
      }
      // filter() produces the displayable console output for the newest job.
      results[0].output = filter(results[0].stdmerged);
      res.render('latest_build.html', {
        admin_view: false,
        jobs: results,
        results_detail: results[0],
        triggered_by_commit: triggered_by_commit,
        org:org,
        repo:repo,
        repo_url:repo_config.url,
        has_prod_deploy_target:repo_config.has_prod_deploy_target
      });
    });
});
// Once the fake clock has passed every expected run, stop the job and
// finish the test.
setTimeout(function finish() {
  job.cancel();
  test.done();
}, 3250);
// Cancel the job partway through so no further invocations occur.
setTimeout(function cancelEarly() {
  job.cancel();
}, 1250);