return through.obj(function(file, enc, done) { /* istanbul ignore next */ if (file.isNull()) { done(null, file); } file.path = file.path.replace(/\.gz$/, ''); var metadata = getMetadata(file); // Authenticate on Google Cloud Storage var storage = gcloud.storage({ keyFilename: options.keyFilename, projectId: options.projectId }); var bucket = storage.bucket(options.bucket); var gcPah = normalizePath(options.base, file); var gcFile = bucket.file(gcPah); file.pipe(gcFile.createWriteStream({metadata: metadata})) .on('error', done) .on('finish', function() { if (options.public) { return gcFile.makePublic(function(err) { logSuccess(gcPah); done(err, file); }); } logSuccess(gcPah); return done(null, file); }); });
/**
 * Streams a JPEG into the 'cast-feed' bucket under a random UUID and
 * resolves with the object's public URL.
 *
 * @param {stream.Readable} stream - image bytes to upload
 * @returns {Promise<string>} public https URL of the stored object
 */
function upload(stream) {
  // Lazily build the bucket handle from env-provided service credentials.
  if (!bucket) {
    const client = gcloud.storage({
      projectId: 'poetic-genius-139107',
      credentials: {
        private_key: process.env.GCS_PRIVATE_KEY,
        client_email: process.env.GCS_CLIENT_EMAIL,
      },
    });
    bucket = client.bucket('cast-feed');
  }
  const objectName = uuid.v4();
  const gcsFile = bucket.file(objectName);
  return new Promise((resolve, reject) => {
    const sink = gcsFile.createWriteStream({
      metadata: {
        contentType: 'image/jpeg',
      },
    });
    sink.on('error', reject);
    sink.on('finish', () => resolve(`https://storage.googleapis.com/cast-feed/${objectName}`));
    stream.pipe(sink);
  });
}
/**
 * Opens a handle to a Cloud Storage bucket using the given credentials.
 *
 * @param {Object} config - carries projectId and keyFilename
 * @param {string} bucket - name of the bucket to open
 * @returns {Bucket} gcloud bucket handle
 */
module.exports.initBucket = function (config, bucket) {
  return gcloud
    .storage({
      projectId: config.projectId,
      keyFilename: config.keyFilename,
    })
    .bucket(bucket);
};
module.exports = function (gcloudConfig, cloudStorageBucket) { var storage = gcloud.storage(gcloudConfig); var bucket = storage.bucket(cloudStorageBucket); // Returns the public, anonymously accessable URL to a given Cloud Storage // object. // The object's ACL has to be set to public read. function getPublicUrl (filename) { return 'https://storage.googleapis.com/' + cloudStorageBucket + '/' + filename; } // Express middleware that will automatically pass uploads to Cloud Storage. // req.file is processed and will have two new properties: // * ``cloudStorageObject`` the object name in cloud storage. // * ``cloudStoragePublicUrl`` the public url to the object. function sendUploadToGCS (req, res, next) { if (!req.file) { return next(); } var gcsname = Date.now() + req.file.originalname; var file = bucket.file(gcsname); var stream = file.createWriteStream(); stream.on('error', function (err) { req.file.cloudStorageError = err; next(err); }); stream.on('finish', function () { req.file.cloudStorageObject = gcsname; req.file.cloudStoragePublicUrl = getPublicUrl(gcsname); next(); }); stream.end(req.file.buffer); } // Multer handles parsing multipart/form-data requests. // This instance is configured to store images in memory and re-name to avoid // conflicting with existing objects. This makes it straightforward to upload // to Cloud Storage. var multer = require('multer')({ inMemory: true, fileSize: 5 * 1024 * 1024, // no larger than 5mb rename: function (fieldname, filename) { // generate a unique filename return filename.replace(/\W+/g, '-').toLowerCase() + Date.now(); } }); return { getPublicUrl: getPublicUrl, sendUploadToGCS: sendUploadToGCS, multer: multer }; };
/**
 * Builds a handle to the configured Cloud Storage bucket.
 * Outside of GAE an explicit service-account key file is attached;
 * inside GAE the ambient credentials are used.
 */
function getGcloudBucket() {
  var inGae = config.gae.inGAE;
  var opts = { projectId: config.gcloud.projectID };
  if (!inGae) {
    opts.keyFilename = resources.gcloud.gcloudKeyFile;
  }
  return gcloud.storage(opts).bucket(config.gcloud.cbbfBucket);
}
// Lazily constructs (and then reuses) the shared Storage client for this
// Cloud Function's own project.
var _getStorageClient = function() {
  if (storage !== null) {
    return storage;
  }
  storage = gcloud.storage({
    // We're using the API from the same project as the Cloud Function
    projectId: process.env.GCP_PROJECT,
  });
  return storage;
};
/**
 * Opens a read stream for an object in Cloud Storage.
 *
 * @param {string} bucketName - bucket holding the object
 * @param {string} fileName - name of the object to read
 * @returns {stream.Readable} stream of the object's contents
 * @throws {Error} when either argument is missing
 */
function getFileStream (bucketName, fileName) {
  // Guard clauses: both pieces of the object path are required.
  if (!bucketName) {
    throw new Error('Bucket not provided. Make sure you have a ' +
      '"bucket" property in your request');
  }
  if (!fileName) {
    throw new Error('Filename not provided. Make sure you have a ' +
      '"file" property in your request');
  }
  // Create a gcs client and stream the object down.
  return gcloud.storage().bucket(bucketName).file(fileName).createReadStream();
}
// Initialises the GCS-backed store: builds the client and bucket handles,
// creates the bucket if it does not already exist, then grants public
// (allUsers) read access via the bucket's default object ACL.
api.init = function (app, callback) {
  log = app.log.child({ module: api.name, bucketName: config.bucketName });
  api._gcs = gcloud.storage(config.gopts);
  api._bucket = api._gcs.bucket(config.bucketName);
  async.waterfall([
    // Step 1: create the bucket, tolerating "already exists" errors.
    function createBucket (callback) {
      return api._gcs.createBucket(config.bucketName, function onBucket (err, b) {
        // NOTE(review): existence is detected by exact error-message match,
        // which is brittle if the API wording changes — verify against the
        // gcloud library in use.
        var existMsg = 'You already own this bucket. Please select another name.';
        if (err && err.message !== existMsg) {
          log.error({ err: err }, 'Error attempting to create the bucket');
          return callback(err);
        }
        if (err) {
          log.info('Bucket already exists');
        } else {
          log.info('Bucket has been created');
        }
        // Fall back to the pre-built handle when createBucket returns none.
        return callback(null, b || api._bucket);
      });
    },
    // Step 2: make every new object publicly readable by default.
    function setPerms (b, callback) {
      api._bucket = b;
      var acl = {
        entity: 'allUsers',
        role: gcloud.storage.acl.READER_ROLE
      };
      log.info({ acl: acl }, 'Configuring bucket ACL rules');
      api._bucket.acl.default.add(acl, callback);
    }
  ], function (err) {
    if (err) {
      log.error({ err: err }, 'Unable to configure gcloud bucket');
      return callback(err);
    }
    log.info('Bucket has been created and ACL permissions set');
    return callback();
  });
};
module.exports = function(gcloudConfig, cloudStorageBucket){ var storage = gcloud.storage(gcloudConfig); var bucket = storage.bucket(cloudStorageBucket); // Gets the image url in the cloud function getUrl(filename){ return 'https://storage.googleapis.com/' + cloudStorageBucket '/' + filename; } // Handles uploads to Google Cloud function sendToGCS(req, res, next){ if(!req.file) { return next(); } var gcsname = Date.now() + req.file.originalname; var file = bucket.file(gcsname); var stream = file.createWriteStream(); stream.on('error', function(err) { req.file.cloudStorageError = err; next(err); }); stream.on('finish', function() { req.file.cloudStorageObject = gcsname; req.file.cloudStoragePublicUrl = getUrl(gcsname); next(); }); stream.end(req.file.buffer); } // Lets multer manage file requests var multer = require('multer')({ inMemory: true, fileSize: 5 * 1024 * 1024, rename: function(fieldname,filename){ return filename.replace(/\W+/g, '-').toLowerCase() + Date.now(); } }); return { getUrl: getUrl, sendToGCS: sendToGCS, multer: multer }; };
constructor(options) { super(options); this.extensions = ['creation', 'creation-defer-length']; if (!options.bucket) { throw new Error('GCSDataStore must have a bucket'); } this.bucket_name = options.bucket; this.gcs = gcloud.storage({ projectId: options.projectId, keyFilename: options.keyFilename, }); this.bucket = this._getBucket(); this.authConfig = assign(DEFAULT_CONFIG, { keyFilename: options.keyFilename, }); }
wordCount: function(context, data) { var bucketName = data['bucket']; var fileName = data['file']; if (!bucketName) { context.failure( 'Bucket not provided. Make sure you have a \'bucket\' property in ' + 'your request'); return; } if (!fileName) { context.failure( 'Filename not provided. Make sure you have a \'file\' property in ' + 'your request'); return; } // Create a gcs client. var gcs = gcloud.storage({ // We're using the API from the same project as the Cloud Function. projectId: process.env.GCP_PROJECT, }); var bucket = gcs.bucket(bucketName); var file = bucket.file(fileName); var count = 0; // Use the readLine module to read the stream line-by line. var lineReader = readline.createInterface({ input: file.createReadStream(), }); lineReader.on('line', function(line) { count += line.trim().split(/\s+/).length; }); lineReader.on('close', function() { context.success('The file ' + fileName + ' has ' + count + ' words'); }); },
var gcloud = require('gcloud'); var bucket = gcloud.storage({ projectId: 'ten-veux', keyFilename: __dirname+'/../../config/gcloud.json' }).bucket('tenveuxmedia'); var fs = require('fs'); module.exports = { upload: function (params) { var stream = params.file; var name = params.name; var file = bucket.file(name); stream .pipe(file.createWriteStream()) .on('error', function(err) { params.error(err); }) .on('end', function () { setTimeout(function() { params.done(name); }, 3000);
// Master/controller Cloud Function: streams a text file from GCS line by
// line, groups lines into fixed-size batches, and fans each batch out to a
// worker function over HTTP. When all workers respond, the per-batch word
// counts are summed and reported via context.success.
var master = function(context, data) {
  // Create a gcs client
  var gcs = gcloud.storage({
    // We're using the API from the same project as the Cloud Function.
    projectId: process.env.GCP_PROJECT,
  });
  // Get the location (url) of the map function
  var fnUrl = data['workerFunctionUrl'];
  // Get the bucket containing our source file
  var bucket = gcs.bucket(data['bucket']);
  // Load the master file using the stream API
  console.log(
    'Opening file [' + data['file'] + '] and creating a read stream...');
  var inStream = bucket.file(data['file']).createReadStream()
    .on('error', function(err) {
      context.failure("Error reading file stream for " + data['file'] +
        ": " + err.message);
      return;
    });
  // use the readLine module to read the stream line-by line
  console.log('Got stream, reading file line-by-line...');
  var lineReader = require('readline').createInterface({input: inStream});
  // Create an array to hold our request promises
  var promises = [];
  // We are going to batch the lines, we could use any number here
  var batch = [];
  var BATCH_SIZE = 3;
  lineReader.on('line', function(line) {
    // Flush the batch once full, then start a new one with this line.
    // NOTE(review): invoke() and SHARED_KEY are defined elsewhere in this
    // module; each call presumably returns a Promise of a word count —
    // verify against the rest of the file.
    if (batch.length === BATCH_SIZE) {
      // Send the batch.
      promises.push(invoke(fnUrl, batch, SHARED_KEY));
      batch = [];
    }
    batch.push(line.trim());
  });
  lineReader.on('close', function() {
    // We might have trailing lines in an incomplete batch.
    if (batch.length > 0) {
      promises.push(invoke(fnUrl, batch, SHARED_KEY));
    }
    Promise.all(promises).then(
      function(result) {
        console.log('All workers have returned');
        // The result will be an array of return values from the workers.
        var count = 0;
        for (var i = 0; i < result.length; ++i) {
          count += parseInt(result[i]);
        }
        context.success(
          'The file ' + data['file'] + ' has ' + count + ' words');
      },
      function(err) {
        console.error('Error!');
        context.failure(err);
      });
  });
};
/**
 * Repository-scoped wrapper around a Cloud Storage bucket.
 * Derives the bucket name from the repo name via Util.makeBucketName and
 * keeps both the storage client and the bucket handle on the instance.
 *
 * @param {string} repoName - name of the repository this store backs
 */
function Filestore(repoName) {
  this.name = repoName;
  this.bucketName = Util.makeBucketName(repoName);
  var client = gcloud.storage(Auth);
  this.storage = client;
  this.bucket = client.bucket(this.bucketName);
}
/*
 * Bucket names for each media type, declared in our config.js file.
 */
var PHOTO_BUCKET = config.get('PHOTO_BUCKET');
var VIDEO_BUCKET = config.get('VIDEO_BUCKET');
var OTHER_BUCKET = config.get('OTHER_BUCKET');

/*
 * Storage client bound to the project id declared in config.js.
 */
var storage = gcloud.storage({
  projectId: config.get('GCLOUD_PROJECT')
});

/*
 * Shared builder for the public, anonymously accessable URL to a given
 * Cloud Storage object. The object's ACL has to be set to public read.
 */
function publicUrlFor(bucketName, filename) {
  return 'https://storage.googleapis.com/' + bucketName + '/' + filename;
}

function getPhotoBucketUrl(filename) {
  return publicUrlFor(PHOTO_BUCKET, filename);
}

function getVideoBucketUrl(filename) {
  return publicUrlFor(VIDEO_BUCKET, filename);
}

function getOtherBucketUrl(filename) {
  return publicUrlFor(OTHER_BUCKET, filename);
}

// Express middleware that will automatically pass uploads to Cloud Storage.
var gcloud = require('gcloud'); // The following environment variables are set by app.yaml when running on GAE, // but will need to be manually set when running locally. // The storage client is used to communicate with Google Cloud Storage var storage = gcloud.storage({ projectId: process.env.GCLOUD_PROJECT }); // A bucket is a container for objects (files). var bucket = storage.bucket(process.env.GCLOUD_STORAGE_BUCKET); module.exports = bucket;
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 'use strict'; // [START all] // [START setup] // By default, gcloud will authenticate using the service account file specified // by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use the // project specified by the GCLOUD_PROJECT environment variable. See // https://googlecloudplatform.github.io/gcloud-node/#/docs/guides/authentication var gcloud = require('gcloud'); // Get a reference to the storage component var storage = gcloud.storage(); // [END setup] // [START list] /** * Lists files in a bucket. * * @param {string} name The name of the bucket. * @param {function} cb The callback function. */ function listFiles (name, callback) { if (!name) { return callback(new Error('"name" is required!')); } var bucket = storage.bucket(name);
import gcloud from 'gcloud'; import path from 'path'; const gcs = gcloud.storage({ projectId: 'knocknock-976', keyFilename: path.join(__dirname, 'gcloud-cert.json') }); export function uploadFile (bucket, filePath) { return new Promise((resolve, reject) => { gcs.bucket(bucket).upload(filePath, function (err, file, apiResponse) { if (err) { reject(err); } else { resolve(file); } }); }) } export function deleteFile (bucket, fileId) { return new Promise((resolve, reject) => { const file = gcs.bucket(bucket).file(fileId); if (!file) return resolve(); file.delete((err) => { if (err) { reject(err); } else { resolve(); }
// Wires up all application routes: Busboy-based uploads mirrored both to a
// local ./tmp directory and to a per-user GCS bucket, download/delete
// passthroughs to GCS, and the passport login/signup/profile pages.
module.exports = function(app, express, passport, fs, Busboy, _, io){
  var router = express.Router();
  var path = require('path');
  var Puid = require('puid');
  var jwt = require('jsonwebtoken');
  var User = require('../models/user');
  var File = require('../models/file');
  var gcloud = require('gcloud');
  var storage;
  // NOTE(review): project id and key-file location are hard-coded; consider
  // moving them to configuration.
  storage = gcloud.storage({
    projectId: 'main-aspect-584',
    keyFilename: path.resolve('./', 'key.json')
  });
  router.get('/', function(req, res) {
    res.render('index');
  });
  // Streams each uploaded file to ./tmp on disk and into a GCS bucket named
  // after the logged-in user's id, then persists a File record and
  // broadcasts it over socket.io.
  router.post('/upload', function(req, res) {
    var busboy = new Busboy({headers : req.headers});
    var originalDir = path.resolve('./tmp/');
    var bucket = storage.bucket(req.user.id);
    // FIXME(review): `next` is not defined in this handler's scope (the
    // route signature is (req, res)), so this error callback would itself
    // throw a ReferenceError.
    busboy.on('error', function(err){
      next(err);
    });
    busboy.on('file', function(campo, stream, nomeArquivo, encoding, mimetype){
      // Write the incoming stream to disk...
      var gravar = fs.createWriteStream(originalDir+'/'+nomeArquivo);
      stream.pipe(gravar);
      // ...and immediately re-read the same path into GCS.
      // NOTE(review): this read starts before the disk write has finished,
      // so the GCS copy may be truncated — confirm the intended ordering.
      var inStream = fs.createReadStream(originalDir+'/'+nomeArquivo);
      inStream.pipe(bucket.file(nomeArquivo).createWriteStream());
      var file = new File({file_name: nomeArquivo, type: mimetype, user: req.user.id});
      // Broadcast the new file record to connected sockets.
      io.emit('news', file);
      file.save(function(err, fl){
        if (err) throw err;
        return fl;
      });
    });
    busboy.on('end', function(){
      res.send(200);
    });
    req.pipe(busboy);
  });
  // Streams an object from the user's bucket straight to the response.
  router.get('/download/:name', function(req, res){
    storage.bucket(req.user._id).file(req.params.name).createReadStream().pipe(res);
  });
  // Deletes an object from the user's bucket and replies 204 on success.
  router.get('/delete/:name', function(req, res){
    var file = storage.bucket(req.user._id).file(req.params.name);
    file.delete(function(err, data){
      if (err) throw new Error(err)
      res.sendStatus(204);
    });
  });
  router.get('/login', function(req, res) {
    res.render('user/login', { message: req.flash('loginMessage') });
  });
  app.post('/login', passport.authenticate('local-login', {
    successRedirect : '/profile',
    failureRedirect : '/login',
    failureFlash : true
  }));
  router.get('/signup', function(req, res) {
    res.render('user/signup', { message: req.flash('signupMessage') });
  });
  router.post('/signup',
    passport.authenticate('local-signup', {
      successRedirect : '/profile',
      failureRedirect : '/signup',
      failureFlash : true
    }));
  router.get('/profile', isLoggedIn, function(req, res) {
    res.render('user/profile', { user : req.user });
  });
  router.get('/files', isLoggedIn, function(req, res) {
    File.find({user: req.user.id}, function(err, files){
      if (err) throw err;
      res.render('user/files', { files: files });
    });
  });
  router.get('/logout', function(req, res) {
    req.logout();
    res.redirect('/');
  });
  io.on('connection', function (socket) {
    console.log("Socket Connected");
  });
  app.use('/', router);
};
var Place = require('mongoose').model('Place'); var fs = require('fs'); // Google Cloud SDK for NodeJS var gcloud = require('gcloud'); /* * Google cloud storage configuration */ var gcs = gcloud.storage({ projectId: 'utility-glider-130309', keyFilename: './server/config/disini-6b3b2077c500.json' }); var bucket = gcs.bucket('disini-upload'); exports.getPlaces = function(req, res) { var page = Number(req.params.page); var pageLimit = Number(req.params.limit); var query = {}; var options = { populate: 'address.city address.state owner categories', page: page, limit: pageLimit }; Place.paginate(query, options).then(function(result) { res.send(result); }); // Place
(function (VideoRouter) { 'use strict'; // [START config] // Multer is required to process file uploads and make them available via // req.files. // var multer = require('multer')({ // inMemory: true, // fileSize: 5 * 1024 * 1024 * 1024 // no larger than 5mb, you can change as needed. // }); // var multer = multer({ //multer settings // storage: mstorage // }); var mstorage = require('multer').diskStorage({ destination: function (req, file, cb) { cb(null, '../uploads/'); }, filename: function (req, file, cb) { var datetimestamp = Date.now(); cb(null, file.fieldname + '-' + datetimestamp + '.' + file.originalname.split('.')[file.originalname.split('.').length - 1]); } }); var multer = require('multer')({ storage: mstorage }); // The following environment variables are set by app.yaml when running on GAE, // but will need to be manually set when running locally. // The storage client is used to communicate with Google Cloud Storage var storage = gcloud.storage({ projectId: 'play-1376' }); // A bucket is a container for objects (files). 
var bucket = storage.bucket('play-video'); // [END config] VideoRouter.router = express_1.Router(); function handleVideos(videos, res, next) { async.each(videos, (video, done) => { // populate each video with its author video.populate('author', done); }, (popErr) => { if (popErr) { next(popErr); return; } // serialize and return res.status(200).json(video_serializer_1.videoSerializer.serialize(videos)); }); } VideoRouter.router.get('/', function (req, res, next) { if (req.query.author) { req.checkQuery('author', 'not an Object Id').isMongoId(); let errors = req.validationErrors(); if (errors) { res.status(403).json({ errors: errors, success: false, }); return; } video_1.Video.findByAuthor(req.query.author, (err, videos) => { handleVideos(videos, res, next); }); } else { video_1.Video.findAll((err, videos) => { handleVideos(videos, res, next); }); } }); //router.use(Authentication.authenticatedRoute); VideoRouter.router.post('/', function (req, res, next) { // validate the incoming data: console.log('creating a video'); req.checkBody('data.type', 'not a video record').equals('videos'); req.checkBody('data.attributes.title', 'missing').len(1); req.checkBody('data.attributes.videourl', 'missing').notEmpty(); req.checkBody('data.attributes.thumbnailurl', 'missing').notEmpty(); req.checkBody('data.attributes', 'missing').notEmpty(); let errors = req.validationErrors(); if (errors) { res.status(400).json({ errors: 'malformed JSON-API resource' }); return; } console.log('deserializing...'); new jsonApiSerializer.Deserializer().deserialize(req.body, (error, video) => { if (error) { console.log('deserialize video failed'); res.status(400).json({ errors: error.toString(), success: false }); return; } let mongooseVideo = new video_1.Video(video); mongooseVideo.save((saveErr) => { if (saveErr) { res.status(403).json({ error: saveErr.toString(), success: false }); return; } video_1.Video.findByAuthor(video.author, (err, videos) => { handleVideos(videos, res, next); }); }); }); 
}); VideoRouter.router.delete('/:id', function (req, res, next) { let prefix; let authorId; async.series([ (done) => { video_1.Video.findById(req.params.id, (err, video) => { if (err) { next(err); done(err); return; } prefix = video.videourl.split('/')[video.videourl.split('/').length - 1].split('.')[0]; authorId = video.author.toString(); done(); }); }, (done) => { video_1.Video.remove({ _id: req.params.id }, function (err) { if (err) { next(err); done(err); return; } done(); }); }, (done) => { bucket.deleteFiles({ prefix: prefix }, function (err) { if (err) { next(err); done(err); return; } done(); }); } ], (processErr) => { prefix = null; if (processErr) { res.status(403).json({ error: processErr.toString(), success: false }); } else { video_1.Video.findByAuthor(authorId, (err, videos) => { handleVideos(videos, res, next); }); } }); }); // [START process] // Process the file upload and upload to Google Cloud Storage. VideoRouter.router.post('/upload', multer.single('file'), function (req, res, next) { console.log("body: ", req.body); if (!req.file) { return res.status(400).send('No file uploaded.'); } ffmpeg(req.file.path) .inputFormat('mov') .screenshots({ count: 1, timestamps: [0], filename: req.file.path.split('.mov')[0] + '.png', folder: '../uploads/', size: '320x240' }).on('end', function () { console.log('Screenshots taken'); let filestoUpload = [req.file.path.split('.mov')[0] + '.png', req.file.path]; var publicUrl = []; //use async foreach to upload the local files to a new file to be created in your bucket. async.forEach(filestoUpload, (file, callback) => bucket.upload(file, function (err) { if (!err) { publicUrl.push(format('https://storage.googleapis.com/%s/%s', bucket.name, file.split('../uploads/')[1])); } callback(); }), function (err) { if (err) return next(err); //Tell the user about the great success res.status(200).send(JSON.stringify(publicUrl.sort())); }); }); }); })(VideoRouter || (VideoRouter = {}));
import chokidar from 'chokidar' import gcloud from 'gcloud' import config from './config' import readdir from './lib/readdir' const gcs = gcloud.storage({ projectId: config.project_id, keyFilename: config.key_filename }) const bucket = gcs.bucket(config.bucket_name) const watch = chokidar.watch(config.watch_dir, { ignoreInitial: true }) watch .on('ready', () => { console.log('ready') readdir(config.watch_dir, { gitignore: true }, (err, files) => { if (err) { console.error(err) } console.log(files) }) }) .on('add', (path) => { bucket.upload(path, (err, file) => { if (err) {
// Book store backed by Cloud Datastore (records) and Cloud Storage (cover
// images). Exposes list/add/delete operations; cover images are uploaded
// publicly and their URL stored on the Book entity.
module.exports = function(config) {
  var gcloud = require('gcloud');
  var dataset = gcloud.datastore.dataset({
    projectId: config.projectId,
    keyFilename: config.keyFilename
  });
  var storage = gcloud.storage({
    projectId: config.projectId,
    keyFilename: config.keyFilename
  });
  var bucket = storage.bucket(config.bucketName);
  // Fetches every Book entity; results go to callback(err, books).
  function getAllBooks(callback) {
    var query = dataset.createQuery(['Book']);
    dataset.runQuery(query, callback);
  }
  // Fetches only the Book entities owned by the given userId.
  function getUserBooks(userId, callback) {
    var query = dataset.createQuery(['Book']).filter('userId', '=', userId);
    dataset.runQuery(query, callback);
  }
  // Saves a new Book; when cover image bytes are supplied they are uploaded
  // first and the resulting public URL stored on the entity.
  function addBook(title, author, coverImageData, userId, callback) {
    var entity = {
      key: dataset.key('Book'),
      data: {
        title: title,
        author: author,
      }
    };
    if (userId) entity.data.userId = userId;
    if (coverImageData)
      uploadCoverImage(coverImageData, function(err, imageUrl) {
        if (err) return callback(err);
        entity.data.imageUrl = imageUrl;
        dataset.save(entity, callback);
      });
    else
      dataset.save(entity, callback);
  }
  // Deletes a Book and, if present, its cover image object in the bucket.
  // NOTE(review): `url` (node's url module) is not required in this visible
  // scope — confirm it is imported at the top of the file.
  function deleteBook(bookId, callback) {
    var key = dataset.key(['Book', parseInt(bookId, 10)]);
    dataset.get(key, function(err, book) {
      if (err) return callback(err);
      if (book.data.imageUrl) {
        // Object name is the URL path with its leading slash stripped.
        var filename = url.parse(book.data.imageUrl).path.replace('/', '')
        var file = bucket.file(filename);
        file.delete(function(err) {
          if (err) return callback(err);
          dataset.delete(key, callback);
        });
      } else {
        dataset.delete(key, callback);
      }
    });
  }
  // Streams cover-image bytes into the bucket under a unique name, makes
  // the object public, and calls back with its public URL.
  function uploadCoverImage(coverImageData, callback) {
    // Generate a unique filename for this image
    var filename = '' + new Date().getTime() + "-" + Math.random()
    var file = bucket.file(filename);
    var imageUrl = 'https://' + config.bucketName + '.storage.googleapis.com/' + filename;
    var stream = file.createWriteStream();
    stream.on('error', callback);
    stream.on('finish', function() {
      // Set this file to be publicly readable
      file.makePublic(function(err) {
        if (err) return callback(err);
        callback(null, imageUrl);
      });
    });
    stream.end(coverImageData);
  }
  return {
    getAllBooks: getAllBooks,
    getUserBooks: getUserBooks,
    addBook: addBook,
    deleteBook: deleteBook
  };
};
var fs = require('fs'), path = require('path'), _ = require('lodash'), NodeRSA = require('node-rsa'), key = new NodeRSA({b: 1024}), pubkey = key.exportKey('public'), privakey = key.exportKey('private'); var LocalStrategy = require('passport-local').Strategy; var User = require('../models/user'); var gcloud = require('gcloud'); var storage; storage = gcloud.storage({ projectId: 'main-aspect-584', keyFilename: path.resolve('./', 'key.json') }); module.exports = function(passport) { passport.serializeUser(function(user, done) { done(null, user.id); }); passport.deserializeUser(function(id, done) { User.findById(id, function(err, user) { done(err, user); }); }); passport.use('local-signup', new LocalStrategy({ usernameField : 'email',
/**
 * Builds a gcloud storage client configured with this file's project
 * settings.
 * @returns {Object} gcloud storage client
 */
GCSFile.prototype.storage = function () {
  var settings = this.projectSettings;
  return gcloud.storage(settings);
};
'use strict'; var assert = require('assert'); var gcloud = require('gcloud'); var request = require('request'); var gcs = gcloud.storage(); var myBucket = gcs.bucket('stephen-has-a-new-bucket'); var file = myBucket.file(Date.now()); var authClient = gcs.authClient; function saveThenCheckIfItSavedHttp(newContent, callback) { authClient.authorizeRequest({ method: 'POST', uri: 'https://www.googleapis.com/upload/storage/v1/b/' + myBucket.name + '/o', qs: { name: file.name, uploadType: 'media' }, body: newContent }, function(err, reqOpts) { assert.ifError(err); request(reqOpts, function(err) { assert.ifError(err); var publicUrl = 'https://storage.googleapis.com/' + myBucket.name + '/' + file.name; request.get(publicUrl, function(err, resp, body) { assert.ifError(err);
const gcloud = require('gcloud'); const Promise = require('bluebird'); const request = require('superagent'); const urlparser = require('url'); const hooks = require('./hooks'); const Video = require('./video-model'); // Middleware for handling file upload const VIDEO_SIZE = 5 * 1024 * 1024; const prepareMultipart = require('../../middleware/prepare-multipart')('video', VIDEO_SIZE); const attachFileToFeathers = require('../../middleware/attach-file-to-feathers')(); const CLOUD_BUCKET = 'staging.you-pin.appspot.com'; const gcs = gcloud.storage({ projectId: 'You-pin', keyFilename: './youpin_gcs_credentials.json' }); const bucket = gcs.bucket(CLOUD_BUCKET); function getPublicUrl (filename) { return 'https://storage.googleapis.com/' + CLOUD_BUCKET + '/' + filename; } function uploadToGCS(reqFile) { return new Promise(function (resolve, reject) { if (!reqFile) { return reject(new Error('No file provided')); } const gcsname = Date.now() + '_' + reqFile.originalname;
var fs = require('fs'); var request = require('request'); var gcloud = require('gcloud'); var gcs = gcloud.storage({ projectId: 'dola-gis-server', keyFilename: 'root/dola-gis-server-79665239667c.json' }); var data_bucket = gcs.bucket('co-publicdata'); module.exports = function(program) { var today = formatDate(new Date()); var a = request("https://gis.dola.colorado.gov/grants/gather?start=01-JAN-2010&end=" + today + "&program=" + program, function(err, res, body) { if (!err && res.statusCode === 200) { writeCSV(program, body); } else { console.log(err); } }); } function writeCSV(prg, data) {
var gcloud = require('gcloud');

// SECURITY(review): a live-looking service-account private key is committed
// in source here. The key should be revoked and the credentials loaded from
// a key file or environment variables instead of being checked in.
var storage = gcloud.storage(
  {
    projectId: 'zippy-carving-731',
    credentials: {
      "private_key_id": "aa5fd3f38da21d610f498c91b24928787b6642ea",
      "private_key": "-----BEGIN PRIVATE KEY-----\nMIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAK4g7hZphCXXMgYL\nrglVvA/fDmKvbxOI7Gf35Tm1NkxqW576i1VNRKvtxC1LrWtuqRUeGnZcR0jg4AVa\nOUQiMF+L3k0yR6/4aTJQNSWv2fOL5dwI1V1LW/HcA24PPOoWmwgBOQaXuje3zjun\nssi2Y2lg5sgp5jCWUGN3tSJFIpN/AgMBAAECgYB2Z4X+T3nAkXg+jCqnxli/IhoS\nGOEbE3xNpk+E0ig+BgSPMicUthoAHwnLEy61YuYuqe2slksSz3cFrLhMEEN7VWEo\ny39JJ1Y9CaEG3Kkdvrzchk2tp5jLHP6uKI1BURenvqWDw35cNZMXSJ5+AVnOH59r\nhizAZcG2psMesAfp4QJBAOBk3IuSbYc4vmK6U1R4FOIiKGuCgX7W98zJWX5OSMHH\nJTeMYxHH98E3AZzX5kdlWX4bYoOijYtz6GMMKBTPSksCQQDGp50cN9Ff2B9eYfq+\nMErA3ZRM0YO9OZHduLfOvzFGtqcfiWBaKR+cPphM8/wOHhLdaKRHYrwNH7xQI/K/\nq9sdAkEA2gWls4amHMgpU0uuQ1gZEk4V779xiofbZIIODgaJ8p1Hr90bNN+R/Y0w\nZ+/tsljqxvhck4GQ/Xm0xOe+2dk+WQJAccHgBR2tpx3i8LBY3vpWhFUerFK6Buzl\nq7swfpMby6uizjtj0p2D/XwWyNJav1bXZLSchkhy+Wz38Eh1pO7rvQJAE7thftb9\nT8rHHLcHmzAUmUKrn/oJU+0wBBJ4odCksZcVMqg5SBjmKE3wws3KEwhZ8MpxKXK5\n3fWIQ4C6PZlcWQ\u003d\u003d\n-----END PRIVATE KEY-----\n",
      "client_email": "*****@*****.**",
      "client_id": "418935202925-h9s15gbkjghhulp7ko4r4pit1gqc62ah.apps.googleusercontent.com",
      "type": "service_account"
    }
  }
);

// Export the bucket that stores fooforms user files.
module.exports = storage.bucket('fooforms-user-files');
var gcloud = require("gcloud"); var storage = gcloud.storage({projectId:"dir-bg-scraper"}); var sample_TXT_file = "Hello there,\r\nThis is sample text file with important info\r\n\r\nBest Regards!"; storage.createBucket("dir-bg-scraper", function(err, bucket, apiResponse) { if (err===null) { // bucket is the newly created "Test-Bucket" console.log("'dir-bg-scraper.appspot.com' successfully created!"); // create 'README.md' file into this bucket var new_file = bucket.file("README.md"); upload_file(new_file, sample_TXT_file, function(err){ if (err!==null) { console.log("Successfully uploaded the file!"); } else { console.error("Failed to uploaded the file.."); } }); } else { console.error("Feiled to create 'dir-bg-scraper.appspot.com'..", err); } }); function upload_file(file, contents, callback) { // open write stream var stream = file.createWriteStream(); // if there is an error signal back stream.on('error', callback);