return copyDataFile().then(function () {
    var def = vow.defer(),
        options = target.getOptions() || config.get('server') || { host: '127.0.0.1', port: 3000 },
        host = options.host,
        port = options.port,
        url = util.format('http://%s:%s/publish/%s/%s', host, port, target.getSourceName(), target.ref);

    if (target.isDryRun) {
        logger.info('Publish command was launched in dry run mode', module);
        logger.info(util.format('Tarball data should be loaded to host: %s port: %s', host, port), module);
        return vow.resolve();
    }

    logger.info(util.format('Tarball data will be loaded to %s', url), module);

    fstream.Reader({ path: path.join(target.getOutputPath(), constants.DIRECTORY.TEMP), type: 'Directory' })
        .pipe(tar.Pack())
        .pipe(zlib.Gzip())
        .pipe(request.post(url))
        .on('error', function (err) {
            logger.error(util.format('publish tarball error %s', err), module);
            def.reject(err);
        })
        .on('end', function () {
            logger.info(util.format('publish tarball sent to %s', url), module);
            def.resolve(target);
        });

    return def.promise();
});
exports.run = function (opts, callback) {
    var builder = this.builder,
        sourcePath = builder.getSourcePath(),
        targetPackage = path.resolve(builder.pkgInfo.name + '-' + builder.pkgInfo.version + '.tar.gz'),
        log = builder.log.bind(builder, callback);

    // pack the files into a tar.gz archive
    // code courtesy of @izs wonderful section from npm
    // ==> https://github.com/isaacs/npm/blob/master/lib/utils/tar.js#L86
    fstream.Reader({
        type: 'Directory',
        path: sourcePath,
        filter: function () {
            // filter out repo files
            // TODO: do this properly...
            return this.path.indexOf('.git') < 0;
        }
    })
    .on('error', log(new Error('Could not read path: ' + sourcePath)))
    .pipe(tar.Pack())
    .on('error', log(new Error('Unable to create tar: ' + targetPackage)))
    .pipe(zlib.Gzip())
    .on('error', log(new Error('Unable to gzip package: ' + targetPackage)))
    .pipe(fstream.Writer({ type: 'File', path: targetPackage }))
    .on('error', log(new Error('Unable to write file: ' + targetPackage)))
    .on('close', callback);
};
return new Promise(resolve => {
    const stream = fs.createWriteStream(`./releases/u_${os}.tar.gz`);
    stream.on('close', () => resolve());

    return tar.pack(os_tmp_path)
        .pipe(zlib.Gzip())
        .pipe(stream);
});
function buildPackage(directory) {
    var folder = fstream.Reader({
        path: directory,
        type: 'Directory',
        filter: function (entry) {
            // {path, basename, dirname, type} (type is "Directory" or "File")
            var basename = entry.basename;
            // some files are *never* allowed under any circumstances
            // these files should always be either temporary files or
            // version control related files
            if (basename === '.git' ||
                basename === '.lock-wscript' ||
                basename.match(/^\.wafpickle-[0-9]+$/) ||
                basename === 'CVS' ||
                basename === '.svn' ||
                basename === '.hg' ||
                basename.match(/^\..*\.swp$/) ||
                basename === '.DS_Store' ||
                basename.match(/^\._/)) {
                return false;
            } else {
                return true;
            }
        }
    });

    var tarPack = tar.Pack();
    var gzip = zlib.Gzip();

    folder.on('error', function (er) {
        if (er) debug('Error reading folder');
        return gzip.emit('error', er);
    });

    tarPack.on('error', function (er) {
        if (er) debug('tar creation error');
        gzip.emit('error', er);
    });

    return folder.pipe(tarPack).pipe(gzip);
}
return new Promise(function (resolve, reject) {
    var zlib = require('zlib'),
        tar = require('tar'),
        fstream = require('fstream'),
        input = self.gen_dir;

    var bufs = [];

    var packer = tar.Pack()
        .on('error', function (e) { reject(e); });

    var gzipper = zlib.Gzip()
        .on('error', function (e) { reject(e); })
        .on('data', function (d) { bufs.push(d); })
        .on('end', function () {
            var buf = Buffer.concat(bufs);
            var name = self.projectName + '+Documentation';
            self.blobClient.putFile(name + '.tar.gz', buf)
                .then(function (hash) {
                    self.result.addArtifact(hash);
                    resolve();
                })
                .catch(function (err) {
                    reject(err);
                })
                .done();
        });

    var reader = fstream.Reader({ 'path': input, 'type': 'Directory' })
        .on('error', function (e) { reject(e); });

    reader
        .pipe(packer)
        .pipe(gzipper);
})
function pack() {
    var writer = typeof dest === 'string'
        ? fstream.Writer({ path: dest, type: 'File' })
        : dest;

    var reader = fstream.Reader({
        path: uri.dirname,
        type: 'Directory',
        // Write project.xml first so streaming readers can load it first.
        sort: function (basename) {
            return basename.toLowerCase() === 'project.xml' ? -1 : 1;
        },
        filter: function (info) {
            if (info.props.basename[0] === '.') return false;
            if (info.props.basename[0] === '_') return false;
            if (info.props.type === 'Directory') return true;
            if (info.props.basename.toLowerCase() === 'project.xml') return true;
            var extname = path.extname(info.props.basename).toLowerCase();
            if (extname === '.png') return true;
            if (extname === '.jpg') return true;
            if (extname === '.svg') return true;
        }
    })
    .pipe(tar.Pack({ noProprietary: true }))
    .pipe(zlib.Gzip())
    .pipe(writer);

    reader.on('error', callback);
    writer.on('error', callback);
    writer.on('end', callback);
}
lock(targetTarball, function (er) {
  if (er) return cb(er)

  new Packer({ path: folder, type: "Directory", isDirectory: true })
    .on("error", function (er) {
      if (er) log.error("tar pack", "Error reading " + folder)
      return cb(er)
    })
    // By default, npm includes some proprietary attributes in the
    // package tarball. This is sane, and allowed by the spec.
    // However, npm *itself* excludes these from its own package,
    // so that it can be more easily bootstrapped using old and
    // non-compliant tar implementations.
    .pipe(tar.Pack({ noProprietary: !npm.config.get("proprietary-attribs") }))
    .on("error", function (er) {
      if (er) log.error("tar.pack", "tar creation error", targetTarball)
      cb(er)
    })
    .pipe(zlib.Gzip())
    .on("error", function (er) {
      if (er) log.error("tar.pack", "gzip error " + targetTarball)
      cb(er)
    })
    .pipe(fstream.Writer({ type: "File", path: targetTarball }))
    .on("error", function (er) {
      if (er) log.error("tar.pack", "Could not write " + targetTarball)
      cb(er)
    })
    .on("close", cb)
})
export default async function genRsltPkg (ctx, next) {
  if (ctx.error) {
    await next()
  }

  let codePath = path.join(ctx.codeDir, '/diff-result')
  let gzipPath = path.join(ctx.codeDir, '/' + ctx.codeDirName + '.tar.gz')

  // create target dir
  if (!await mkdir(ctx, codePath)) return

  // copy the increment code to target dir
  if (!await copy(ctx, ctx.newCode, path.join(codePath, '/diff-code'))) return

  // generate the diff description json file
  if (!await writeFile(ctx, path.join(codePath, '/diff-description.json'), JSON.stringify(ctx.diffResult))) return

  // generate the result package as a .tar.gz archive
  try {
    fstream.Reader({ 'path': codePath, 'type': 'Directory' }) // Read the source directory
      .pipe(tar.Pack())                                       // Convert the directory to a .tar file
      .pipe(zlib.Gzip())                                      // Compress the .tar file
      .pipe(fstream.Writer({ 'path': gzipPath }))             // Give the output file name
  } catch (e) {
    error(ctx, 'error occurred while generating .tar.gz file for ' + ctx.codeDir)
    return
  }

  await next()
}
function copy() {
    var read = tilelive.createReadStream(fsrc, {
        type: 'pyramid',
        bounds: fsrc.data.bounds || source.extent(fsrc.data)
    });
    read.on('error', function (err) { prog.emit('error', err); });
    read.on('length', prog.setLength);

    var serialtiles = read.pipe(tilelive.serialize()).pipe(prog).pipe(zlib.Gzip());

    var uploadprog;
    try {
        uploadprog = upload({
            stream: serialtiles,
            account: tm.oauth().account,
            accesstoken: tm.oauth().accesstoken,
            mapid: mapid,
            mapbox: tm.config().mapboxauth
        });
    } catch (err) {
        return prog.emit('error', err);
    }

    uploadprog
        .once('error', function (err) { prog.emit('error', err); })
        .once('finished', finish);
}
Mount.prototype.streamFile = function (p, fd, stat, etag, req, res, end) {
  var streamOpt = { fd: fd, start: 0, end: stat.size }
  var stream = fs.createReadStream(p, streamOpt)
  stream.destroy = function () {}

  // too late to effectively handle any errors.
  // just kill the connection if that happens.
  stream.on('error', function (e) {
    console.error('Error serving %s fd=%d\n%s', p, fd, e.stack || e.message)
    res.socket.destroy()
    end()
  })

  if (res.filter) {
    stream = stream.pipe(res.filter)
  }

  if (this.opt.gzip !== false) {
    var gzstr = zlib.Gzip()
    var gz = getGz(p, req)
    stream.pipe(gzstr)
  }

  res.statusCode = 200

  if (gz) {
    // we don't know how long it'll be, since it will be compressed.
    res.setHeader('content-encoding', 'gzip')
    gzstr.pipe(res)
  } else {
    if (!res.filter) res.setHeader('content-length', stat.size)
    stream.pipe(res)
  }

  stream.on('end', function () {
    process.nextTick(end)
  })

  if (this.cache.content._cache.max > stat.size) {
    // collect it, and put it in the cache
    var key = fd + ':' + stat.size + ':' + etag
    var bufs = []

    stream.on('data', function (c) {
      bufs.push(c)
    })

    if (gzstr) {
      var gzbufs = []
      gzstr.on('data', function (c) {
        gzbufs.push(c)
      })
      gzstr.on('end', function () {
        var content = Buffer.concat(bufs)
        content.gz = Buffer.concat(gzbufs)
        this.cache.content.set(key, content)
      }.bind(this))
    }
  }
}
return new Promise(async (resolve, reject) => {
  let exportPath = this.app.get('exportPath')

  try {
    fsExtra.copySync(this.app.get('extensionsPath'), path.join(this.app.get('exportPath'), 'extensions'))
    console.log("Copied extensions to exports folder")
  } catch (err) {
    console.error("Error copying extensions to exports folder.", err)
  }

  try {
    fsExtra.copySync(this.app.get('themesPath'), path.join(this.app.get('exportPath'), 'themes'))
    console.log("Copied themes to exports folder")
  } catch (err) {
    console.error("Error copying themes to exports folder.", err)
  }

  try {
    fsExtra.copySync(this.app.get('uploadsPath'), path.join(this.app.get('exportPath'), 'images'))
    console.log("Copied images to exports folder")
  } catch (err) {
    console.error("Error copying images to exports folder.", err)
  }

  if (!fs.existsSync(path.join(this.app.get('exportPath'), 'data'))) {
    fs.mkdirSync(path.join(this.app.get('exportPath'), 'data'));
  }

  let forZip = []
  for (var i = 0; i < collections.length; i++) {
    try {
      let response = await this._find(collections[i])
      let url = path.join(this.app.get('exportPath'), 'data', collections[i] + '.json')
      forZip.push({ path: url, name: collections[i] + '.json' })
    } catch (err) {
      console.log("Error exporting data", err);
      return reject(err)
    }

    if (i === collections.length - 1) {
      const folderWeWantToZip = self.app.get('exportPath');

      params.res.writeHead(200, {
        'Content-Type': 'application/octet-stream',
        'Content-Disposition': 'attachment; filename=site_data.zip',
        'Content-Encoding': 'gzip'
      });

      /* Read the source directory */
      fstream.Reader({ 'path': folderWeWantToZip, 'type': 'Directory' })
        .pipe(tar.Pack())   /* Convert the directory to a .tar file */
        .pipe(zlib.Gzip())  /* Compress the .tar file */
        .pipe(params.res);  // Write back to the response, or wherever else...
    }
  }
})
server.app.get("/export/targz", function(req, res) { res.header('Content-Type', 'application/octet-stream'); res.header('Content-Disposition', 'attachment; filename="workspace.tar.gz"'); res.header('Content-Encoding', 'gzip'); fstream.Reader({ 'path' : workspace.root, 'type' : 'Directory' }) .pipe(tar.Pack()) .pipe(zlib.Gzip()) .pipe(res); });
var getDirectoryCompressionStream = function (fstream, callback) {
    var agent = this.agent;

    // create agent archive
    logger.info('compressing directory to stream');

    return fstream.Reader({ 'path': file, 'type': 'Directory' }) /* Read the source directory */
        .pipe(tar.Pack())   /* Convert the directory to a .tar file */
        .pipe(zlib.Gzip()); /* Compress the .tar file */
};
grunt.registerHelper('packer', function (input, dest, error) {
    var stream = input.pipe(tar.Pack())
        .on('error', error('tar creation error ' + dest));

    // if it ends with .tgz, then Gzip it.
    if (path.extname(dest) === '.tgz') stream = stream.pipe(zlib.Gzip());

    return stream.on('error', error('gzip error ' + dest))
        .pipe(fstream.Writer({ type: 'File', path: dest }))
        .on('error', error('Could not write ' + dest))
        .on('close', error());
});
], function (err, buildDescription) {
    if (err) {
        return done(err);
    }

    var stream = fstream.Reader({
        path: buildDescription.directories.moduledir,
        type: "Directory",
        isDirectory: true
    })
    .pipe(tar.Pack({ noProprietary: true }))
    .pipe(zlib.Gzip())
    .pipe(new BufferedStream());

    self.perform('build.output', buildDescription, stream, done);
});
function zipFile(path, res, zipPath) {
    var fstream = require('fstream'),
        tar = require('tar'),
        zlib = require('zlib');

    fstream.Reader({ 'path': path, 'type': 'Directory' })  /* Read the source directory */
        .pipe(tar.Pack())                                   /* Convert the directory to a .tar file */
        .pipe(zlib.Gzip())                                  /* Compress the .tar file */
        .pipe(fstream.Writer({ 'path': zipPath }));         /* Give the output file name */

    setTimeout(deleteFolderRecursive, end_timeout, path);
}
function acknowledge() {
  cleanup()

  // empty stream
  if (!body && !filename) return stream.end()

  // we can just use the utility method
  if (typeof body === 'string' || Buffer.isBuffer(body)) {
    if (!compress) return stream.end(body)
    zlib.gzip(body, function (err, body) {
      // doubt this would ever happen,
      // but be sure to destroy the stream in case of errors
      if (err) {
        onerror(err)
        stream.destroy()
        return
      }
      stream.end(body)
    })
    return
  }

  // convert a filename to stream
  if (filename) body = fs.createReadStream(filename)

  // handle the stream
  body.on('error', destroy)
  if (compress) {
    body
      .pipe(zlib.Gzip(compressOptions))
      .on('error', destroy)
      .pipe(stream)
  } else {
    body.pipe(stream)
  }

  // make sure we don't leak file descriptors when the client cancels these streams
  stream.on('error', destroy)
  stream.on('close', destroy)
  stream.on('finish', destroy)

  function destroy(err) {
    if (err) onerror(filterError(err))
    dethroy(body)
    stream.removeListener('close', destroy)
    stream.removeListener('finish', destroy)
  }
}
var pipeFileRepoToStream = function (fileRepo, destStream, callback) {
    if (!fileRepo || !fileRepo.name || !fileRepo.path) {
        if (callback) {
            callback(new Error("required file repo data missing"));
        }
        return;
    }

    var repoDir = pathlib.resolve(fileRepo.path + pathlib.sep + fileRepo.name);

    fstream.Reader({ 'path': repoDir, 'type': 'Directory' }) /* Read the source directory */
        .pipe(tar.Pack())    /* Convert the directory to a .tar file */
        .pipe(zlib.Gzip())   /* Compress the .tar file */
        .pipe(destStream);
};
function createTarGz(sourceDir, fileName, callback) {
    var reader = fstream.Reader({ 'path': sourceDir, 'type': 'Directory' });
    var writer = fstream.Writer({ 'path': fileName });
    var pack = tar.Pack();
    var zip = zlib.Gzip();

    reader.pipe(pack).pipe(zip).pipe(writer);

    writer.on('close', function () {
        callback();
    });
    writer.on('error', function (err) {
        callback(err);
    });
}
module.exports = function (req, next) {
  var pack = tar.Pack()
  var zip = zlib.Gzip()

  var project = fsReader({
    'path': req.project,
    ignoreFiles: [".surgeignore"]
  })

  project.addIgnoreRules(ignore)

  req.tarballPath = path.resolve("/tmp/", Math.random().toString().split(".")[1] + ".tar")

  var tarball = fs.createWriteStream(req.tarballPath)

  tarball.on("finish", function (e) {
    next()
  })

  project.pipe(pack).pipe(zip).pipe(tarball)
}
//
// GenerateTarGz creates a .tar.gz file from contents in the src directory and
// saves them in a dest file.
//
function GenerateTarGz(src, dest, cb) {
    debug("GenerateTarGz");

    // A list of file extensions that should be packaged into the .tar.gz.
    // Files with all other file extensions will be excluded to minimize the size
    // of the deployment transaction payload.
    var keep = [
        ".go",
        ".yaml",
        ".json",
        ".c",
        ".h",
        ".pem"
    ];

    // Create the pack stream specifying the ignore/filtering function
    var pack = tar.pack(src, {
        ignore: function (name) {
            // Check whether the entry is a file or a directory
            if (fs.statSync(name).isDirectory()) {
                // If the entry is a directory, keep it in order to examine it further
                return false;
            } else {
                // If the entry is a file, check to see if it's the Dockerfile
                if (name.indexOf("Dockerfile") > -1) {
                    return false;
                }

                // If it is not the Dockerfile, check its extension
                var ext = path.extname(name);

                // Ignore any file whose extension is not in the keep list
                if (keep.indexOf(ext) === -1) {
                    return true;
                } else {
                    return false;
                }
            }
        }
    })
    .pipe(zlib.Gzip())
    .pipe(fs.createWriteStream(dest));

    pack.on("close", function () {
        return cb(null);
    });
    pack.on("error", function () {
        return cb(Error("Error on fs.createWriteStream"));
    });
}
function pack(folder, options) {
  options = options || {}

  if (typeof folder === 'string') {
    var filter = options.filter || function (entry) { return true; }

    folder = packer({
      path: folder,
      type: 'Directory',
      isDirectory: true,
      ignoreFiles: options.ignoreFiles || ['.gitignore'],
      filter: function (entry) {
        // {path, basename, dirname, type} (type is "Directory" or "File")
        var basename = entry.basename
        // some files are *never* allowed under any circumstances
        // these files should always be either temporary files or
        // version control related files
        if (basename === '.git' ||
            basename === '.lock-wscript' ||
            basename.match(/^\.wafpickle-[0-9]+$/) ||
            basename === 'CVS' ||
            basename === '.svn' ||
            basename === '.hg' ||
            basename.match(/^\..*\.swp$/) ||
            basename === '.DS_Store' ||
            basename.match(/^\._/)) {
          return false
        }
        // custom excludes
        return filter(entry)
      }
    })
  }

  // By default, npm includes some proprietary attributes in the
  // package tarball. This is sane, and allowed by the spec.
  // However, npm *itself* excludes these from its own package,
  // so that it can be more easily bootstrapped using old and
  // non-compliant tar implementations.
  var tarPack = tar.Pack({ noProprietary: options.noProprietary || false })
  var gzip = zlib.Gzip()

  folder.on('error', function (er) {
    if (er) debug('Error reading folder')
    return gzip.emit('error', er)
  })

  tarPack.on('error', function (er) {
    if (er) debug('tar creation error')
    gzip.emit('error', er)
  })

  return folder.pipe(tarPack).pipe(gzip)
}
return prom((fulfill, reject) => {
    // tar and gzip the directory
    if (fs.existsSync(filePath)) {
        fstream.Reader({ path: filePath, type: 'Directory' })
            .pipe(tar.Pack())
            .pipe(zlib.Gzip())
            .pipe(fstream.Writer(filePath + '.tar.gz'))
            .on('error', reject)
            .on('close', () => {
                process.stdout.write('\ncompressed to ' + filePath + '.tar.gz \n');
                fulfill(filePath);
            });
    } else {
        process.stdout.write('\nNo file to compress to ' + filePath + '.tar.gz \n');
        fulfill(filePath);
    }
});
var tmpFile = tmp.file(function (err, tmpPath, fd) { console.info("Zipping up soure. Putting it in " + tmpPath); fstream.Reader({ 'path': '.', 'type': 'Directory' }) .pipe(tar.Pack()) .pipe(zlib.Gzip()) //.pipe(fstream.Writer({ 'path': tmpPath })) .pipe(fs.createWriteStream(tmpPath)) .on('finish', function() { // Move the zip to src/public/dist/dist.tar.gz console.info("Source zipped. Moving to src/public/dist/dist.tar.gz"); fstream.Reader({ 'path': tmpPath}) //.pipe(fstream.Writer({ 'path': 'src/public/dist/dist.tar.gz' })) .pipe(fs.createWriteStream('src/public/dist/dist.tar.gz')) .on('finish', function () { startApp(); }); }); });
var tmpFile = tmp.file(function (err, tmpPath, fd) { console.info("Starting to zip up source. Gathering it in <%s> and zipping. Please wait...", tmpPath); fstream.Reader({ 'path': '.', 'type': 'Directory' }) .pipe(tar.Pack()) .pipe(zlib.Gzip()) //.pipe(fstream.Writer({ 'path': tmpPath })) .pipe(fs.createWriteStream(tmpPath)) .on('finish', function() { // Move the zip to src/public/dist/dist.tar.gz var dest = "src/public/dist/dist.tar.gz"; console.info("Source zipped. Moving to <%s>", dest); fstream.Reader({ 'path': tmpPath}) //.pipe(fstream.Writer({ 'path': 'src/public/dist/dist.tar.gz' })) .pipe(fs.createWriteStream(dest)) .on('finish', function () { startApp(); }); }); });
gulp.task('archive', ['generate:package'], function () {
    var packageFile = 'gimmie.tgz'

    var ignores = _([
        'node_modules',
        '.DS_Store',
        '.gitmodules',
        '.git',
        '.gitignore',
        'package.json',
        'gulpfile.js',
        packageFile
    ]).inject(function (r, v) {
        r[v] = true
        return r
    }, {})

    fstream.Reader({
        path: __dirname,
        root: './',
        filter: function (fstream, file) {
            return !ignores[file.basename]
        }
    })
    .pipe(tar.Pack())
    .pipe(zlib.Gzip())
    .pipe(fstream.Writer({ 'path': packageFile }))
})
module.exports = function modifyResponse(res, proxyRes, callback) {
    let contentEncoding = proxyRes;
    if (proxyRes && proxyRes.headers) {
        contentEncoding = proxyRes.headers['content-encoding'];
        // Delete the content-length if it exists. Otherwise, an exception will occur
        // @see: https://github.com/langjt/node-http-proxy-json/issues/10
        if ('content-length' in proxyRes.headers) {
            delete proxyRes.headers['content-length'];
        }
    }

    let unzip, zip;
    // Now only deal with the gzip/deflate/undefined content-encoding.
    switch (contentEncoding) {
        case 'gzip':
            unzip = zlib.Gunzip();
            zip = zlib.Gzip();
            break;
        case 'deflate':
            unzip = zlib.Inflate();
            zip = zlib.Deflate();
            break;
    }

    // Cache the original response methods so they can be called after the modification.
    let _write = res.write;
    let _end = res.end;

    if (unzip) {
        unzip.on('error', function (e) {
            console.log('Unzip error: ', e);
            _end.call(res);
        });
        handleCompressed(res, _write, _end, unzip, zip, callback);
    } else if (!contentEncoding) {
        handleUncompressed(res, _write, _end, callback);
    } else {
        console.log('Not supported content-encoding: ' + contentEncoding);
    }
};
function tarGzip(folder, outFile) {
    var deferred = when.defer();

    var fstream = require('fstream'),
        tar = require('tar'),
        zlib = require('zlib');

    var writable = fs.createWriteStream(outFile);
    writable.on('finish', function () {
        console.error('all writes are now complete.');
        deferred.resolve();
    });

    /* Read the source directory */
    fstream.Reader({ 'path': folder, 'type': 'Directory' })
        .pipe(tar.Pack())   /* Convert the directory to a .tar file */
        .pipe(zlib.Gzip())  /* Compress the .tar file */
        .pipe(writable);

    return deferred.promise;
}
Deployment.prototype.package = function (tarball, callback) {
    function filter(info) {
        if ((info.type === 'Directory' && info.depth === 1 && info.basename === 'data') ||
            info.basename === '.dpd') {
            return false;
        }
        return true;
    }

    fstream.Reader({ path: this.path, type: 'Directory', filter: filter })
        .on('error', callback)
        .pipe(tar.Pack())
        .on('error', callback)
        .pipe(zlib.Gzip())
        .on('error', callback)
        .pipe(fstream.Writer({ type: "File", path: tarball }))
        .on('close', callback);
};
function getLog(data, cb) {
    data = data || {};
    var from;
    var till;

    // if there were no parameters supplied, return the whole file
    if (data.from && data.till && typeof data.from === "number" && typeof data.till === "number") {
        from = data.from;
        till = data.till;
    } else {
        from = 0;
        till = Date.now();
    }

    var stream = fstream.Reader({
        "path": path.resolve(process.cwd(), "./logs/"),
        "type": "Directory"
    }).pipe(tar.Pack()).pipe(zlib.Gzip());

    cb(null, stream, null, "log-stream");
}