// Update the local clone via git: stash any locally modified tracked
// files, fetch from origin and rebase onto the target commit, then
// re-apply the stash. Free variables (that, startedAt, version, sha,
// dryRun, oldConfig, commit, callback, logger, localRepo) come from
// the enclosing update routine.
localRepo.status(function(err, status) {
	if (err) {
		return that._logResult(err, startedAt, version.sha, sha, null, callback);
	}

	// Dry run: skip the update, just simulate completion after 3s.
	// (Comment fixed: the previous "after 5s" did not match the
	// 3000 ms timeout.)
	if (dryRun) {
		logger.log('info', '[autoupdate] Simulating update...');
		setTimeout(function() {
			that._postExtract(err, oldConfig, commit, callback);
		}, 3000);
		return;
	}

	// Fetches from the remote repository and rebases onto the target
	// commit; if popStash is set, re-applies the stash afterwards.
	var update = function(popStash, callback) {
		logger.log('info', '[autoupdate] Fetching update from GitHub');
		localRepo.remote_fetch('origin master', function(err) {
			if (err) {
				return that._logResult(err, startedAt, version.sha, sha, null, callback);
			}
			logger.log('info', '[autoupdate] Rebasing to ' + commit.sha);
			localRepo.git('rebase ' + commit.sha, function(err) {
				if (err) {
					return that._logResult(err, startedAt, version.sha, sha, null, callback);
				}
				// if stashed, re-apply changes.
				if (popStash) {
					logger.log('info', '[autoupdate] Re-applying stash');
					localRepo.git('stash', {}, ['apply'], function() {
						that._postExtract(err, oldConfig, commit, callback);
					});
				} else {
					that._postExtract(err, oldConfig, commit, callback);
				}
			});
		});
	};

	// Check for tracked changed files. (Bug fix: a dead `if (err)`
	// check inside this loop was removed — err is already known to be
	// falsy at this point.)
	var trackedFiles = [];
	for (var filename in status.files) {
		if (status.files.hasOwnProperty(filename) && status.files[filename].tracked) {
			trackedFiles.push(filename);
		}
	}

	// If local changes were found, stash them first so the rebase
	// cannot fail on a dirty working tree.
	if (trackedFiles.length > 0) {
		logger.log('info', '[autoupdate] Detected changed files: [' + trackedFiles.join(', ') + '], stashing changes first.');
		localRepo.git('stash', {}, ['save'], function(err) {
			if (err) {
				return that._logResult(err, startedAt, version.sha, sha, null, callback);
			}
			update(true, callback);
		});
	} else {
		update(false, callback);
	}
});
// Determine the installed version by matching the version from
// package.json against the repository's tags. When no tag matches
// (e.g. a "0.0.3-pre" development version), fall back to locating the
// first commit made after the most recently modified local file.
// (Bug fixes: removed the unused `tags`/`olderTags` bookkeeping that
// was computed but never read, and the "Found commit" log now reports
// the matched commit's own date instead of the date of the older
// commit the scan stopped on.)
repo.tags(function(err, tagArray) {
	if (err) {
		return callback(err);
	}

	// Loop through tags and try to match the one from package.json.
	var matchedTag;
	var cleanPackageVer = semver.clean(packageVersion);
	for (var i = 0; i < tagArray.length; i++) {
		var tag = tagArray[i];
		if (semver.valid(tag.name)) {
			if (semver.clean(tag.name) === cleanPackageVer) {
				matchedTag = tag;
				break;
			}
		}
	}

	if (matchedTag) {
		// Tagged version: retrieve the commit the tag points at.
		that._getCommit(matchedTag.commit.url, function(err, commit) {
			that.setVersion(commit, packageVersion);
			callback(null, version);
		});

	// no match. that means the local copy isn't a tagged version
	// but something like 0.0.3-pre. in this case, find the last
	// modified file and retrieve the first commit after.
	} else {
		logger.log('info', '[autoupdate] Local copy is not a tagged version (%s), trying to match a commit on GitHub.', packageVersion);
		var lastModifiedTime = 0;
		var lastModifiedFile, time;
		readdirp({
			// NOTE(review): '__dirname + "../../../"' lacks a separator
			// before the first "..", so it collapses against the last
			// segment of __dirname — verify this resolves to the
			// intended root directory.
			root: path.normalize(__dirname + '../../../'),
			directoryFilter: ['!.git', '!node_modules', '!.idea'],
			fileFilter: ['!pinemhi.ini', '!*.log']
		}).on('data', function(entry) {
			time = +new Date(entry.stat.mtime);
			if (time > lastModifiedTime) {
				lastModifiedTime = time;
				lastModifiedFile = entry.path;
			}
		}).on('end', function(err) {
			if (err) {
				return callback(err);
			}
			var lastModifiedDate = new Date(lastModifiedTime);
			logger.log('info', '[autoupdate] Last modified file is "%s", changed: %s.', lastModifiedFile, lastModifiedDate, {});
			logger.log('info', '[autoupdate] Finding nearest commit..');
			repo.commits(function(err, commits) {
				var commit, commitTime, lastCommit;
				var found = false;
				// Walk commits from newest to oldest; lastCommit ends
				// up being the first commit made after the last local
				// file modification.
				for (var i = 0; i < commits.length; i++) {
					commit = commits[i];
					commitTime = +new Date(commit.commit.committer.date);
					if (commitTime < lastModifiedTime) {
						if (!lastCommit) {
							lastCommit = commit;
						}
						found = true;
						break;
					}
					lastCommit = commit;
				}
				if (found) {
					logger.log('info', '[autoupdate] Found commit "%s" from %s.', lastCommit.sha.substr(0, 7), new Date(lastCommit.commit.committer.date), {});
					that.setVersion(lastCommit, packageVersion);
					callback(null, version);
				} else {
					// The commits API returned only newer commits (one
					// page); without the matching commit we cannot
					// safely determine the version, so bail out hard.
					logger.log('error', '[autoupdate] More than 30 new commits, please update and try again.');
					logger.log('info', 'Goodbye, killing myself.');
					process.kill(process.pid, 'SIGTERM');
				}
			});
		});
	}
});
// Performs the actual update to revision `sha`: fetches the target
// commit from the GitHub API, refuses downgrades, then updates either
// via the local git repository (fetch + rebase, stashing local
// changes) or — when no local repo is available — by downloading and
// extracting GitHub's zipball over the installation.
that._getCommit('https://api.github.com/repos/' + settings.pind.repository.user + '/' + settings.pind.repository.repo + '/commits/' + sha, function(err, commit) {

	if (err) {
		logger.log('error', '[autoupdate] Cannot retrieve commit for revision %s: %s', sha, err);
		return that._logResult('Cannot retrieve commit for revision "' + sha + '": ' + err, startedAt, version.sha, sha, null, callback);
	}

	// make sure we're not downgrading
	var v = that._readVersion();
	if (!dryRun && Date.parse(commit.commit.committer.date) < Date.parse(v.date)) {
		err = 'Not downgrading current version (' + v.date + ') to older commit (' + commit.commit.committer.date + ').';
		logger.log('info', '[autoupdate] ERROR: ' + err);
		return that._logResult(err, startedAt, version.sha, sha, null, callback);
	}

	that.emit('updateStarted');

	// NOTE(review): '__dirname + "../../../"' is missing a separator
	// before the first "..", which collapses against the last segment
	// of __dirname — confirm this resolves to the intended root.
	var pindPath = path.normalize(__dirname + '../../../');

	// if git repo is available, update via git
	if (localRepo) {

		// look for modified files via status
		localRepo.status(function(err, status) {
			if (err) {
				return that._logResult(err, startedAt, version.sha, sha, null, callback);
			}

			// dry run: skip update, just simulate completion after 3s.
			// (Comment fixed: previously said 5s, timeout is 3000 ms.)
			if (dryRun) {
				logger.log('info', '[autoupdate] Simulating update...');
				setTimeout(function() {
					that._postExtract(err, oldConfig, commit, callback);
				}, 3000);
				return;
			}

			// fetches and rebases from remote repository
			var update = function(popStash, callback) {
				logger.log('info', '[autoupdate] Fetching update from GitHub');
				localRepo.remote_fetch('origin master', function(err) {
					if (err) {
						return that._logResult(err, startedAt, version.sha, sha, null, callback);
					}
					logger.log('info', '[autoupdate] Rebasing to ' + commit.sha);
					localRepo.git('rebase ' + commit.sha, function(err) {
						if (err) {
							return that._logResult(err, startedAt, version.sha, sha, null, callback);
						}
						// if stashed, re-apply changes.
						if (popStash) {
							logger.log('info', '[autoupdate] Re-applying stash');
							localRepo.git('stash', {}, ['apply'], function() {
								that._postExtract(err, oldConfig, commit, callback);
							});
						} else {
							that._postExtract(err, oldConfig, commit, callback);
						}
					});
				});
			};

			// check for tracked changed files. (Bug fix: removed a dead
			// `if (err)` check inside this loop — err is known falsy.)
			var trackedFiles = [];
			for (var filename in status.files) {
				if (status.files.hasOwnProperty(filename) && status.files[filename].tracked) {
					trackedFiles.push(filename);
				}
			}

			// if found, stash changes before the rebase
			if (trackedFiles.length > 0) {
				logger.log('info', '[autoupdate] Detected changed files: [' + trackedFiles.join(', ') + '], stashing changes first.');
				localRepo.git('stash', {}, ['save'], function(err) {
					if (err) {
						return that._logResult(err, startedAt, version.sha, sha, null, callback);
					}
					update(true, callback);
				});
			} else {
				update(false, callback);
			}
		});

	// otherwise, update via zipball
	} else {

		// download zipball
		var url = 'https://github.com/' + settings.pind.repository.user + '/' + settings.pind.repository.repo + '/archive/' + sha + '.zip';
		var dest = settings.pind.tmp + '/node-pind-' + sha + '.zip';
		var stream = fs.createWriteStream(dest);
		var failed = false;

		// when download completed
		stream.on('close', function() {
			if (failed) {
				return that._logResult('Download of zipball from GitHub failed (see logs).', startedAt, version.sha, sha, null, callback);
			}
			logger.log('info', '[autoupdate] Done, extracting now...');

			// unzip each entry, trimming the first level of the folder structure.
			fs.createReadStream(dest)
				.pipe(unzip.Parse())
				.on('entry', function(entry) {
					if (entry.type == 'File') {
						var entryDest = path.normalize(pindPath + entry.path.substr(entry.path.indexOf('/') + 1));
						var dir = path.dirname(entryDest);
						if (!fs.existsSync(dir)) {
							mkdirp.sync(dir);
						}
						if (dryRun) {
							logger.log('info', '[autoupdate] (Not) extracting %s', entryDest);
							entry.autodrain();
						} else {
							logger.log('info', '[autoupdate] Extracting %s', entryDest);
							entry.pipe(fs.createWriteStream(entryDest));
						}
					} else {
						entry.autodrain();
					}
				}).on('close', function() {
					logger.log('info', '[autoupdate] Done, cleaning up %s', dest);
					fs.unlinkSync(dest);
					that._postExtract(err, oldConfig, commit, callback);
				});
		});
		request(url).on('response', function(response) {
			if (response.statusCode != 200) {
				failed = true;
				logger.log('error', '[autoupdate] Failed downloading zip file at %s with code %s.', url, response.statusCode);
				return;
			}
			if (response.headers['content-length']) {
				logger.log('info', '[autoupdate] Downloading %s of zipball to %s...', filesize(response.headers['content-length'], true), dest);
			} else {
				logger.log('info', '[autoupdate] Downloading zipball to %s...', dest);
			}
		}).pipe(stream);
	}
});
/**
 * Retrieves the commits between `fromSha` (newer) and `toSha` (older),
 * either from the local git repository — paging backwards through
 * history — or via the GitHub commits API.
 *
 * Bug fixes over the previous revision:
 *  - local paging skipped by `page * 10` while fetching pages of 100,
 *    so pages overlapped and commits were duplicated; the skip now
 *    matches the page size.
 *  - `_.sortBy` returns a new array (it does not sort in place); the
 *    sorted result is now assigned back to `commits`.
 *  - the "starting commit not found" log/error reported `toSha`
 *    instead of `fromSha`.
 *  - removed an empty dead `else if (!result.started) { }` branch.
 *
 * @param fromSha SHA of the newer commit.
 * @param toSha SHA of the older commit.
 * @param callback Called as (err) on failure, otherwise
 *        (null, commits) with commits = [{ sha, date, message }, ...].
 * @param result Internal accumulator used for recursive local paging.
 */
AutoUpdate.prototype._getCommits = function(fromSha, toSha, callback, result) {

	if (localRepo) {
		logger.log('info', '[autoupdate] Retrieving commits %s..%s from local Git repository.', fromSha.substr(0, 7), toSha.substr(0, 7));
		if (!result) {
			result = { page: 0, started: false, ended: false, commits: [] };
		}
		localRepo.commits('master', 100, result.page * 100, function(err, commits) {
			if (err) {
				return callback(err);
			}
			// newest first
			commits = _.sortBy(commits, function(commit) {
				return -commit.committed_date.getTime();
			});
			for (var i = 0; i < commits.length; i++) {
				var sha = commits[i].id;
				if (sha == toSha) {
					result.started = true;
				}
				if (result.started) {
					result.commits.push({
						sha: sha,
						date: commits[i].committed_date,
						message: commits[i].message
					});
				}
				if (sha == fromSha) {
					result.ended = true;
					break;
				}
			}
			if (!result.ended) {
				if (commits.length > 0) {
					// not done yet, recurse into the next page.
					result.page++;
					AutoUpdate.prototype._getCommits(fromSha, toSha, callback, result);
				} else {
					logger.log('error', '[autoupdate] Ran through all commits but could not find commit with SHA %s.', toSha);
					callback('Ran through all commits but could not find commit with SHA ' + toSha + '.');
				}
			} else {
				if (result.commits.length == 0) {
					logger.log('error', '[autoupdate] Ending commit %s seems to be before starting commit %s.', fromSha, toSha);
					callback('Ending commit ' + fromSha + ' seems to be before starting commit ' + toSha + '.');
				} else {
					logger.log('info', '[autoupdate] Done, returning list of %d commits', result.commits.length);
					callback(null, result.commits);
				}
			}
		});

	} else {
		logger.log('info', '[autoupdate] Retrieving commits %s..%s from GitHub.', fromSha.substr(0, 7), toSha.substr(0, 7));
		var fromUrl = 'https://api.github.com/repos/' + settings.pind.repository.user + '/' + settings.pind.repository.repo + '/commits/' + fromSha;
		var toUrl = 'https://api.github.com/repos/' + settings.pind.repository.user + '/' + settings.pind.repository.repo + '/commits/' + toSha;
		AutoUpdate.prototype._getCommit(fromUrl, function(err, commitFrom) {
			if (err) {
				return callback(err);
			}
			if (!commitFrom.sha) {
				logger.log('error', '[autoupdate] Could not find starting commit %s on GitHub.', fromSha);
				return callback('Could not find starting commit ' + fromSha + ' on GitHub.');
			}
			AutoUpdate.prototype._getCommit(toUrl, function(err, commitTo) {
				if (err) {
					return callback(err);
				}
				if (!commitTo.sha) {
					logger.log('error', '[autoupdate] Could not find ending commit %s on GitHub.', toSha);
					return callback('Could not find ending commit ' + toSha + ' on GitHub.');
				}
				// renamed from `result` to avoid shadowing the paging
				// accumulator parameter above.
				var fetched = [];
				// Fetches one page of commits; recurses via the Link
				// header for subsequent pages. Without a url argument,
				// builds the first-page URL from the two commit dates.
				var fetchCommits = function(url) {
					if (!url) {
						// NOTE(review): assumes _getCommit returns a
						// date on the top-level `committer` object —
						// verify against _getCommit's result shape.
						url = 'https://api.github.com/repos/' + settings.pind.repository.user + '/' + settings.pind.repository.repo + '/commits' + '?since=' + commitTo.committer.date + '&until=' + commitFrom.committer.date + '&per_page=100';
					}
					logger.log('info', '[autoupdate] Fetching %s', url);
					request({
						url: url,
						headers: { 'User-Agent': AutoUpdate.prototype._getUserAgent() }
					}, function(err, response, body) {
						if (err) {
							return callback(err);
						}
						var commits = JSON.parse(body);
						if (!_.isArray(commits)) {
							logger.log('error', '[autoupdate] Expected an array in return but got this: %s', body);
							return callback('Unexpected return from GitHub, check logs.');
						}
						if (commits.length == 0) {
							logger.log('error', '[autoupdate] Got an empty list, that should not have happened. Either provided wrong SHAs (unlikely) or missed the end SHA.');
							return callback('Unexpected return from GitHub, check logs.');
						}
						for (var i = 0; i < commits.length; i++) {
							fetched.push({
								sha: commits[i].sha,
								date: new Date(commits[i].commit.committer.date),
								message: commits[i].commit.message
							});
						}
						// next page is in header, see http://developer.github.com/v3/#pagination
						if (response.headers.link) {
							var links = response.headers.link.split(',');
							var foundNext = false;
							for (i = 0; i < links.length; i++) {
								var link = links[i].split(';');
								if (link[1].trim().match(/rel\s*=\s*["']next["']/i)) {
									fetchCommits(link[0].trim().replace(/^<|>$/g, ''));
									foundNext = true;
									break;
								}
							}
							if (!foundNext) {
								callback(null, fetched);
							}
						} else {
							callback(null, fetched);
						}
					});
				};
				logger.log('info', '[autoupdate] Found both commits on Github, now fetching commits in between.');
				fetchCommits();
			});
		});
	}
};
// Resolves the ending commit on GitHub, then pages through the GitHub
// commits API between the two commit dates, accumulating
// { sha, date, message } entries until no "next" page remains.
// Free variables (commitFrom, toSha, callback, settings, logger,
// request, _) come from the enclosing scope.
AutoUpdate.prototype._getCommit(toUrl, function(err, commitTo) {
	if (err) {
		return callback(err);
	}
	if (!commitTo.sha) {
		logger.log('error', '[autoupdate] Could not find ending commit %s on GitHub.', toSha);
		return callback('Could not find ending commit ' + toSha + ' on GitHub.');
	}
	var result = [];
	// Fetches one page of commits; recurses via the Link header for
	// subsequent pages. Without a url argument, builds the first-page
	// URL from the two commit dates.
	// NOTE(review): commitTo.committer.date assumes _getCommit returns
	// a date on the top-level committer object — verify its shape.
	var fetchCommits = function(url) {
		if (!url) {
			url = 'https://api.github.com/repos/' + settings.pind.repository.user + '/' + settings.pind.repository.repo + '/commits' + '?since=' + commitTo.committer.date + '&until=' + commitFrom.committer.date + '&per_page=100';
		}
		logger.log('info', '[autoupdate] Fetching %s', url);
		request({
			url: url,
			headers: { 'User-Agent' : AutoUpdate.prototype._getUserAgent() }
		}, function(err, response, body) {
			if (err) {
				return callback(err);
			}
			var commits = JSON.parse(body);
			if (!_.isArray(commits)) {
				logger.log('error', '[autoupdate] Expected an array in return but got this: %s', body);
				return callback('Unexpected return from GitHub, check logs.');
			}
			if (commits.length == 0) {
				logger.log('error', '[autoupdate] Got an empty list, that should not have happened. Either provided wrong SHAs (unlikely) or missed the end SHA.');
				return callback('Unexpected return from GitHub, check logs.');
			}
			// Accumulate this page's commits.
			for (var i = 0; i < commits.length; i++) {
				result.push({
					sha: commits[i].sha,
					date: new Date(commits[i].commit.committer.date),
					message: commits[i].commit.message
				});
			}
			// next page is in header, see http://developer.github.com/v3/#pagination
			if (response.headers.link) {
				var links = response.headers.link.split(',');
				var foundNext = false;
				for (i = 0; i < links.length; i++) {
					var link = links[i].split(';');
					if (link[1].trim().match(/rel\s*=\s*["']next["']/i)) {
						// Follow the rel="next" URL (angle brackets stripped).
						fetchCommits(link[0].trim().replace(/^<|>$/g, ''));
						foundNext = true;
						break;
					}
				}
				if (!foundNext) {
					callback(null, result);
				}
			} else {
				callback(null, result);
			}
		});
	};
	logger.log('info', '[autoupdate] Found both commits on Github, now fetching commits in between.');
	fetchCommits();
});
// POST handler: echoes the posted comment to the console, logs it via
// winston at info level, and acknowledges the request.
app.post('/sixty:', function(req, res) {
	var comment = req.body.comment;
	console.log('got post of ' + comment);
	winston.log('info', comment);
	res.send('Thanks!');
});
// Log server errors. Bug fix: these were previously logged at 'info'
// level, which hides genuine failures from error-level monitoring.
server.on('error', function(error) {
	logger.log('error', 'Server Error', error);
});
// Start the HTTP server on port 80. The redundant explicit `undefined`
// hostname argument was dropped — omitting it is equivalent and binds
// to all interfaces.
server.listen(80, function() {
	logger.log('info', 'Server listening on %d, in %s mode', 80, app.get('env'));
});
function(err, stdout, stderr, exitCode) { if (err) { winston.log('error', util.format('%s: Git clone failed', projNick)); winston.log('error', 'STDOUT:'); winston.log('error', stdout); winston.log('error', 'STDERR:'); winston.log('error', stderr); winston.log('error', 'EXIT CODE: ' + exitCode); winston.log('error', err.toString()); } winston.log('info', util.format('%s: Git clone complete', projNick)); var repoPath = project.repo.path; var srcPath; if (repoPath) { srcPath = path.join(tmpDir, repoPath); winston.log('info', util.format('%s: Sending files from repo directory %s', projNick, repoPath)); } else { winston.log('info', util.format('%s: Sending all files from repo', projNick)); } winston.log('debug', util.format('%s: Full path to files being sent: %s', projNick, srcPath)); scpHandler.scpToProject(project, srcPath, function(err) { if (err) { if (err instanceof scpHandler.NoAuthException) { winston.log('error', err.toString()); } else { winston.log('error', util.format('%s: scp failed', projNick)); winston.log('error', err.toString()); } } else { winston.log('info', util.format('%s: scp complete', projNick)); } rimraf(tmpDir, function(err) { if (err) { winston.log('error', util.format('%s: Temp dir not removed: %s', projNick, tmpDir)); winston.log('error', err.toString()); } else { winston.log('info', util.format('%s: Temp dir removed: %s', projNick, tmpDir)); } }); }); });
/* Check if posted hook data has a commit to Master AND corresponds to a
 * project in the config file.
 * If both are true, run that project's SCP job.
 *
 * Assumes hookData is sanitized and in Bitbucket webhook format.
 *
 * Bug fixes: a tmp.dir failure now aborts instead of continuing with
 * an undefined tmpDir, and a failed git clone now cleans up and aborts
 * instead of logging "complete" and attempting the scp anyway.
 */
function handle(hookData) {
	var owner = hookData.repository.owner;
	var slug = hookData.repository.slug;
	var project = getProject(owner, slug);
	var projNick = util.format('%s/%s', owner, slug);

	winston.log('info', util.format('%s: Received hook', projNick));
	if (!project) {
		winston.log('info', util.format('%s: No project found', projNick));
		return;
	}

	winston.log('debug', 'Project repo info:');
	winston.log('debug', util.inspect(project.repo));
	winston.log('debug', 'Hook commit info:');
	winston.log('debug', util.inspect(hookData.commits));
	if (!hasMasterCommit(hookData.commits)) {
		winston.log('info', util.format('%s: No master commits found', projNick));
		return;
	}

	winston.log('info', util.format('%s: Running project', projNick));
	tmp.dir(function(err, tmpDir) {
		if (err) {
			// Bug fix: previously only logged and carried on with an
			// undefined tmpDir; abort instead.
			winston.log('error', err.toString());
			return;
		}
		winston.log('info', util.format('%s: Temp dir created: %s', projNick, tmpDir));

		var repoUrl = project.repo.url;
		var keyPath = project.repo.privateKey;
		winston.log('info', util.format('%s: Cloning %s', projNick, repoUrl));
		gitHandler.cloneInto(repoUrl, tmpDir, keyPath, function(err, stdout, stderr, exitCode) {
			if (err) {
				winston.log('error', util.format('%s: Git clone failed', projNick));
				winston.log('error', 'STDOUT:');
				winston.log('error', stdout);
				winston.log('error', 'STDERR:');
				winston.log('error', stderr);
				winston.log('error', 'EXIT CODE: ' + exitCode);
				winston.log('error', err.toString());
				// Bug fix: previously fell through, logged "complete"
				// and attempted the scp after a failed clone. Clean up
				// the temp dir and abort instead.
				rimraf(tmpDir, function(err) {
					if (err) {
						winston.log('error', util.format('%s: Temp dir not removed: %s', projNick, tmpDir));
						winston.log('error', err.toString());
					} else {
						winston.log('info', util.format('%s: Temp dir removed: %s', projNick, tmpDir));
					}
				});
				return;
			}
			winston.log('info', util.format('%s: Git clone complete', projNick));

			var repoPath = project.repo.path;
			var srcPath;
			if (repoPath) {
				srcPath = path.join(tmpDir, repoPath);
				winston.log('info', util.format('%s: Sending files from repo directory %s', projNick, repoPath));
			} else {
				// NOTE(review): srcPath stays undefined here — looks
				// like scpHandler is expected to default to the whole
				// repo; verify, or set srcPath = tmpDir.
				winston.log('info', util.format('%s: Sending all files from repo', projNick));
			}
			winston.log('debug', util.format('%s: Full path to files being sent: %s', projNick, srcPath));
			scpHandler.scpToProject(project, srcPath, function(err) {
				if (err) {
					if (err instanceof scpHandler.NoAuthException) {
						winston.log('error', err.toString());
					} else {
						winston.log('error', util.format('%s: scp failed', projNick));
						winston.log('error', err.toString());
					}
				} else {
					winston.log('info', util.format('%s: scp complete', projNick));
				}
				// Always clean up the temp dir, success or failure.
				rimraf(tmpDir, function(err) {
					if (err) {
						winston.log('error', util.format('%s: Temp dir not removed: %s', projNick, tmpDir));
						winston.log('error', err.toString());
					} else {
						winston.log('info', util.format('%s: Temp dir removed: %s', projNick, tmpDir));
					}
				});
			});
		});
	});
}