Can I zip files in Firebase Storage via Firebase Cloud Functions? - node.js

Is it possible to compress multiple files in Firebase Storage using Cloud Functions?
For example, if users upload 5 images, a Firebase Cloud Function would create a zip file containing those 5 images.

I couldn't find an end-to-end guide for a similar scenario in Cloud Functions myself, so I had to combine solutions for zipping, accessing files in Cloud Storage, etc. See the result below:
import * as functions from 'firebase-functions';
import admin from 'firebase-admin';
import archiver from 'archiver';
import { v4 as uuidv4 } from 'uuid';

admin.initializeApp();

export const createZip = functions.https.onCall(async () => {
  const storage = admin.storage();
  const bucket = storage.bucket('bucket-name');

  // generate a random name for the zip file
  const filePath = uuidv4();
  const file = bucket.file(filePath);

  const outputStreamBuffer = file.createWriteStream({
    gzip: true,
    contentType: 'application/zip',
  });

  const archive = archiver('zip', {
    gzip: true,
    zlib: { level: 9 },
  });

  archive.on('error', (err) => {
    throw err;
  });

  archive.pipe(outputStreamBuffer);

  // use Firestore, request data etc. to get file names and their full paths in Storage
  // a file path cannot start with '/'
  const userFilePath = 'user-file-path';
  const userFileName = 'user-file-name';

  const userFile = await bucket.file(userFilePath).download();
  archive.append(userFile[0], {
    // to get a directory structure inside the zip, prefix the name, e.g. 'folder/' + userFileName
    name: userFileName,
  });

  archive.on('finish', async () => {
    console.log('uploaded zip', filePath);
    // get a signed URL to download the zip file
    await bucket
      .file(filePath)
      .getSignedUrl({ expires: '03-09-2491', action: 'read' })
      .then((signedUrls) => console.log(signedUrls[0]));
  });

  await archive.finalize();
});
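The handler above only logs the signed URL and returns nothing to the caller. If the client needs the URL back, one option (a sketch that reuses the variables defined above and would replace the final archive.on('finish', ...) / finalize() lines; not part of the original answer) is to wait for the Cloud Storage write stream to finish and return the URL from the callable:

// Sketch: resolve once the zip has been fully written to Cloud Storage,
// then return the signed URL to the caller of the callable function.
const signedUrl = await new Promise((resolve, reject) => {
  outputStreamBuffer.on('finish', async () => {
    const [url] = await bucket
      .file(filePath)
      .getSignedUrl({ expires: '03-09-2491', action: 'read' });
    resolve(url);
  });
  archive.on('error', reject);
  archive.finalize();
});
return { url: signedUrl };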

Related

Upload file in folder in Firebase storage with node.js

I'm new to uploading files. I want to upload images of different products into Firebase Storage, along with another file required by the app. One product can have many images, so I want to create a folder for every product; the name of the folder will be the id of the product.
In code: I use the @google-cloud/storage library to upload the file into Firebase Storage, but I searched the documentation and found no way to create a folder and then upload into that folder.
Here is my code:
I created a multer middleware to pass to an endpoint, with a file-type check.
const express = require("express");
const Multer = require("multer");
const { Storage } = require("@google-cloud/storage");

const storage = new Storage({
  projectId: process.env.PROJECT_FIREBASE_ID,
  keyFilename: "hawat-service.json",
});
const bucket = storage.bucket(process.env.BUCKET_NAME);

const multer = Multer({
  storage: Multer.memoryStorage(),
  fileFilter: (req, file, cb) => {
    checkFileType(req, file, cb);
  }
});

const checkFileType = (req, file, cb) => {
  if (file.fieldname == 'cover' || file.fieldname == 'images') {
    if (!file.originalname.match(/\.(jpg|JPG|jpeg|JPEG|png|PNG|gif|GIF)$/)) {
      req.error = new Error("Only images are allowed");
      return cb(null, false);
    }
  } else if (file.fieldname == 'card' || file.fieldname == 'licence') {
    if (!file.originalname.match(/\.(pdf|jpg|JPG|jpeg|JPEG|png|PNG|gif|GIF)$/)) {
      req.error = new Error("Only images and pdf are allowed");
      return cb(null, false);
    }
  }
  return cb(null, true);
};

module.exports = (req, res, next) => {
  return multer.fields([
    { name: 'cover', maxCount: 1 },
    { name: 'images', maxCount: 5 },
    { name: 'card', maxCount: 1 },
    { name: 'licence', maxCount: 1 }
  ])(req, res, () => {
    if (req.error) return res.status(400).send({ message: req.error.message });
    next();
  });
};
The function to upload a file is:
const express = require("express");
const Multer = require("multer");
const { Storage } = require("@google-cloud/storage");

const storage = new Storage({
  projectId: process.env.PROJECT_FIREBASE_ID,
  keyFilename: "hawat-service.json",
});
const bucket = storage.bucket(process.env.BUCKET_NAME);

module.exports = {
  upload: async (file) => {
    return new Promise((resolve, reject) => {
      let newFileName = `${file.originalname}_${Date.now()}`;
      let fileUpload = bucket.file(newFileName);
      const createStream = fileUpload.createWriteStream({
        metadata: {
          contentType: file.mimetype
        }
      });
      createStream.on('error', (error) => {
        console.log("error in uploading is", error);
        reject('Something is wrong! Unable to upload at the moment.');
      });
      createStream.on('finish', () => {
        // The public URL can be used to directly access the file via HTTP.
        const url = `https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`;
        // storage.bucket(process.env.BUCKET_NAME).file(fileUpload.name).makePublic();
        resolve(url);
      });
      createStream.end(file.buffer);
    });
  }
};
The endpoint is:
router.post('/add-product' , auth, multer , seller.onAddProduct)
The function onAddProduct can receive multiple files from the user.
So how can I create a folder for every product and then upload files into that folder?
Also, how can I delete the folder after creating it?
I am not using the same method you are using but you could use my solution as a case study
await storage.bucket(bucketName).upload(filename, {
destination:"{Foldername}/{Filename}",
})
Folders in Google Cloud Storage are not really a thing. As you can see in this documentation:
gsutil provides the illusion of a hierarchical file tree atop the "flat" name space supported by the Cloud Storage service. To the service, the object gs://your-bucket/abc/def.txt is just an object that happens to have "/" characters in its name. There is no "abc" directory, just a single object with the given name.
So what you see as a folder in Cloud Storage is simply another object that emulates a folder structure; what really matters are the object paths.
In your case there are 2 ways you can go about what you want to do; you can either:
Create an emulated empty directory by creating an object that ends in a trailing slash. For example, to create a subdirectory called foo at the root of a bucket, you would create an empty object (size 0) called foo/ and then upload the file with its full path.
Simply upload the file with its full path, including the desired "subdirectory", and when you fetch it from GCS it will look like it is located in the emulated directory.
Personally I would use the latter, as you will achieve the same results with only 1 step instead of 2.
If you want to create an empty folder in Cloud Storage, you can do this:
const userId = "your_user_id"
// Folder name. Notice the slash at the end of the path
const folderName = `users/${userId}/`;
// Create a folder
await bucket.file(folderName).save("");
After creating the new folder, you can upload your file there by setting its destination:
const destination = `${folderName}${fileName}`;
await bucket.upload(file, {
destination,
})
But actually you don't need to create a folder as a separate step. You can simply set the full destination for your file in bucket.upload(...), as described above.
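The question also asks how to delete such a "folder" afterwards. Since a folder is only a name prefix, a minimal sketch (bucket.deleteFiles is part of @google-cloud/storage; the products/... prefix is just an assumed example, not from the question):

// Sketch: remove every object whose name starts with the "folder" prefix.
const productId = 'your-product-id'; // assumed example id
await bucket.deleteFiles({ prefix: `products/${productId}/` });
// Once no objects carry that prefix, the "folder" no longer appears in the console.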

Can't Download File From Cloud Storage to Cloud Function Temporary Storage to Read and Process CSV Import

I am trying to use a Firebase onCall function to download a CSV file from Cloud Storage, place it in the Cloud Function's temporary storage, then read it and upload it into a Firestore collection called test. Unfortunately, I can't get the download to the Cloud Function's temporary storage to work.
I have tried to follow the Google documentation at https://firebase.google.com/docs/storage/extend-with-functions and other examples using "return bucket.file(filePath).download({destination: tempFilePath})", but without luck.
Here is my code:
//THIS IS THE FUNCTION FOR LOADING AND PROCESSING AN INCOMING CSV FILE
exports.uploadCSV = functions.https.onCall((data, context) => {
  //get the file name from the body
  const fileOne = data.id + '.csv';
  console.log('fileOne', fileOne);
  //join the directory and file name
  const fileName = 'uploads/' + fileOne;
  console.log('fileName', fileName);
  //set up the temporary path
  const tmpFilePath = path.join(os.tmpdir(), fileName);
  console.log('tmpFilePath', tmpFilePath);
  //assign the Firebase project id
  const projectId = 'xxxxxxxxx';
  console.log('projectId');
  //assign storage to the project
  const storage = new Storage({ projectId: projectId });
  console.log('new Storage');
  //set up the bucket configuration
  const bucket = admin.storage().bucket();
  console.log('bucket', bucket);
  //reference the Storage bucket
  const bucketName = 'xxxxxxxxx.appspot.com';
  console.log('bucketName');
  //downloads the file into cloud function temporary storage
  return storage.bucket(bucketName).file(fileName).download({ destination: tmpFilePath })
    .then(() => {
      console.log('image downloaded to:', tmpFilePath);
      //read stream and process
      fs.createReadStream(tmpFilePath)
        .pipe(csv())
        .on('data', (row) => {
          console.log(row);
          if (row) {
            firestore.collection('test').add({
              name: row.Name
            });
          } else {
            console.log('no data');
          }
        })
        .on('end', () => {
          console.log('CSV file successfully processed.');
        });
    })
    .then(() => {
      //removes the temporary file
      return fs.unlink(tmpFilePath);
    })
    .catch((err) => { console.log(err); });
});
While the Network console returns a 200, this is the error I receive in my firebase-functions logs.
{ Error: ENOENT: no such file or directory, open '/tmp/uploads/somedata.csv'
errno: -2,
code: 'ENOENT',
syscall: 'open',
path: '/tmp/uploads/somedata.csv' }
Your download path includes a folder called "uploads", but you never create that directory explicitly. Instead, try just downloading to os.tmpdir() without any intermediate directories to create:
// no "uploads" here
const fileName = fileOne;
console.log('fileName', fileName);
const tmpFilePath = path.join(os.tmpdir(), fileName);
console.log('tmpFilePath', tmpFilePath);
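Alternatively (a sketch, not part of the original answer), the 'uploads/' prefix can be kept if the intermediate directory is created under /tmp before the download, e.g. with fs.mkdirSync. This assumes path, os and fs are already required as in the question's code, and that fileOne is the file name built there:

// Sketch: keep the 'uploads/' prefix, but make sure the directory exists
// before asking the Storage client to download into it.
const fileName = 'uploads/' + fileOne;
const tmpFilePath = path.join(os.tmpdir(), fileName);
fs.mkdirSync(path.dirname(tmpFilePath), { recursive: true });
// ...then call bucket.file(fileName).download({ destination: tmpFilePath }) as before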

How to process a scraped image and upload to firebase storage using firebase functions?

I'm trying to grab some HD images from URLs, resize them, and upload them to Storage.
So far, I've fetched the image and resized it using sharp. Sharp's output API uses .toFile('output.jpg') or .toBuffer(), and I'm not sure how to proceed from here. What would be the easiest way to output the image and upload it to Firebase Storage?
My code so far:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
const db = admin.firestore();
const request = require('request').defaults({ encoding: null });
const sharp = require('sharp');

exports.copyThumbnailImage = functions.firestore.document('users/{userId}/vocab/{vocabId}').onCreate((snapshot, context) => {
  // mock: copyThumbnailImage({ chosenImages: [{ googleThumbnailUrl: "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQlC7Vnu9CuZlA-nTpW8TLPd8dAE456LCpeXoadUKHoxB7WEmM1rfahqsfr", mime: "image/jpeg", originalUrl: "https://storage.tenki.jp/storage/static-images/suppl/article/image/2/27/278/27810/1/large.jpg" }] }, { params: { userId: 'zYCw6DmcEiOS8Yk4QltYPikykwP2', vocabId: 1420970 } })
  const data = snapshot.data()
  const vocabId = context.params.vocabId
  const images = data.chosenImages
  const checkExistencePromises = []
  // Promises check if image ref already exists in firestore
  images.forEach(imageObj => {
    checkExistencePromises.push(db.collection('userImages').where('originalUrl', '==', imageObj.originalUrl).where('vocabId', '==', vocabId).get())
  })
  return Promise.all(checkExistencePromises)
    .then(snapshots => {
      const getImagePromises = []
      snapshots.forEach((snapshot, i) => {
        if (snapshot.empty) {
          // if image doesn't exist already, upload thumbnail to DB, add doc to userImages and add link to review
          const originalUrl = images[i].originalUrl
          getImagePromises.push(getImage(originalUrl))
        } else {
          // add link to review
        }
      })
      return Promise.all(getImagePromises)
    })
    .then(responses => {
      responses.forEach(response => {
        sharp(response).resize(200, 200).toBuffer()
        // ????????
      })
    })
    .then(() => {
    })
    .catch(error => {
      console.log(error)
    })
})

function getImage (url) {
  return new Promise((resolve, reject) => {
    request.get(url, (err, res, body) => {
      if (err) reject(err)
      resolve(body)
    })
  })
}
You can save it to the local file system (the local /tmp disk) and upload it to Cloud Storage from there.
Have a look at this official Cloud Functions sample: https://github.com/firebase/functions-samples/blob/master/convert-images/functions/index.js. (I copy below the code for future reference)
In particular, look at how they save a temporary file with
return spawn('convert', [tempLocalFile, tempLocalJPEGFile]);
and how they upload it with:
return bucket.upload(tempLocalJPEGFile, {destination: JPEGFilePath});
In your case, instead of calling spawn() you would call
.toFile(-theTemporaryFileName-)
Finally, have a look at Write temporary files from Google Cloud Function and Attach firebase cloud function or cache its data from cloud function call about the /tmp disk.
Code from the Cloud Function Sample as of 08/01/2018 (link above)
const functions = require('firebase-functions');
const mkdirp = require('mkdirp-promise');
const gcs = require('@google-cloud/storage')();
const spawn = require('child-process-promise').spawn;
const path = require('path');
const os = require('os');
const fs = require('fs');

// File extension for the created JPEG files.
const JPEG_EXTENSION = '.jpg';

/**
 * When an image is uploaded in the Storage bucket it is converted to JPEG automatically using
 * ImageMagick.
 */
exports.imageToJPG = functions.storage.object().onFinalize((object) => {
  const filePath = object.name;
  const baseFileName = path.basename(filePath, path.extname(filePath));
  const fileDir = path.dirname(filePath);
  const JPEGFilePath = path.normalize(path.format({dir: fileDir, name: baseFileName, ext: JPEG_EXTENSION}));
  const tempLocalFile = path.join(os.tmpdir(), filePath);
  const tempLocalDir = path.dirname(tempLocalFile);
  const tempLocalJPEGFile = path.join(os.tmpdir(), JPEGFilePath);

  // Exit if this is triggered on a file that is not an image.
  if (!object.contentType.startsWith('image/')) {
    console.log('This is not an image.');
    return null;
  }

  // Exit if the image is already a JPEG.
  if (object.contentType.startsWith('image/jpeg')) {
    console.log('Already a JPEG.');
    return null;
  }

  const bucket = gcs.bucket(object.bucket);
  // Create the temp directory where the storage file will be downloaded.
  return mkdirp(tempLocalDir).then(() => {
    // Download file from bucket.
    return bucket.file(filePath).download({destination: tempLocalFile});
  }).then(() => {
    console.log('The file has been downloaded to', tempLocalFile);
    // Convert the image to JPEG using ImageMagick.
    return spawn('convert', [tempLocalFile, tempLocalJPEGFile]);
  }).then(() => {
    console.log('JPEG image created at', tempLocalJPEGFile);
    // Uploading the JPEG image.
    return bucket.upload(tempLocalJPEGFile, {destination: JPEGFilePath});
  }).then(() => {
    console.log('JPEG image uploaded to Storage at', JPEGFilePath);
    // Once the image has been converted delete the local files to free up disk space.
    fs.unlinkSync(tempLocalJPEGFile);
    fs.unlinkSync(tempLocalFile);
    return;
  });
});
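For this question specifically, a shorter route (a sketch under assumptions, not taken from the sample above) is to hand the Buffer that sharp produces straight to file.save(), skipping the local /tmp file entirely; the thumbnails/... path is just an example name:

// Sketch: upload the resized buffer directly to Cloud Storage.
// Assumes admin.initializeApp() has run, `response` is the raw image data
// returned by getImage(), vocabId is in scope, and this runs inside an async function.
const buffer = await sharp(response).resize(200, 200).toBuffer();
const bucket = admin.storage().bucket();
await bucket.file(`thumbnails/${vocabId}.jpg`).save(buffer, {
  metadata: { contentType: 'image/jpeg' },
});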

Uploading multiple files from a Google Cloud VM to Google Cloud Storage using node.js and Glob

I am trying to use Node.js to upload multiple files from my Google Compute Engine VM's local directory to a GCS bucket that I have already created. I am getting the following error each time I run the script.
TypeError [ERR_INVALID_ARG_TYPE]: The "path" argument must be of type string. Received type function
The script:
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
const fs = require('fs');
const glob = require('glob');

// The name of the bucket to access, e.g. "my-bucket"
const bucketName = "myBucket";

// Instantiates a client
const storage = new Storage({
  projectId: 'myprojectID',
  keyFilename: 'my GCS service key'
});

//get files in the local directory of the VM
var allfiles = glob('folder/*.js', function (err, files) {
  if (err) {
    console.log(err);
  }
});

// Uploads VM local dir files to the bucket
storage
  .bucket(bucketName)
  .upload(allfiles)
  .then(() => {
    console.log(`${allfiles} uploaded to ${bucketName}.`);
  })
  .catch(err => {
    console.error('ERROR:', err);
  });
Apparently the upload process needs the file pathnames as strings, but that is exactly what the glob function is supposed to provide. So why the error?
Any help will be seriously appreciated!
Your mistake is that you use allfiles as the return value of glob. This is not correct; the file names are available in the callback (since glob is async), not in the return value.
glob('folder/*.js', function (err, files) {
  if (err) {
    console.log(err);
  }
  var allfiles = files;
  // Uploads VM local dir files to the bucket
  storage
    .bucket(bucketName)
    .upload(allfiles)
    .then(() => {
      console.log(`${allfiles} uploaded to ${bucketName}.`);
    })
    .catch(err => {
      console.error('ERROR:', err);
    });
});
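Note that bucket.upload() accepts a single local file path per call, so the matched paths still need to be uploaded one by one. A minimal sketch of that (reusing the storage and bucketName variables from above):

// Sketch: upload every file matched by glob, one upload() call per path.
glob('folder/*.js', function (err, files) {
  if (err) {
    console.log(err);
    return;
  }
  Promise.all(files.map((filePath) => storage.bucket(bucketName).upload(filePath)))
    .then(() => console.log(`${files.length} files uploaded to ${bucketName}.`))
    .catch((err) => console.error('ERROR:', err));
});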
So I finally made the script work. The problem was that the file path was being captured as an object, but the Google Cloud Storage service needs the path as a string.
The path then had to be converted to a string using JSON.stringify, and the resulting string split to select only the file path content.
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
const fs = require('fs');

var bucketName = 'intended GCS bucketname';
const storage = new Storage({
  projectId: 'full project Id', keyFilename: 'cloud service key'
});

//Invoke Glob for reading filepath in local/vm
var glob = require('glob');
glob('path to local/vm directory', function (err, files) {
  if (err) {
    console.log(err);
  }
  // Stringify the matched paths and split on '"'; list[1] is the first file path
  var list = JSON.stringify(files);
  list = list.split('"');
  var listone = list[1];
  storage
    .bucket(bucketName)
    .upload(listone)
    .then(() => {
      console.log(`${listone} uploaded to ${bucketName}.`);
    })
    .catch(err => {
      console.error('ERROR:', err);
    });
});

Google cloud function download file and redirect to bucket storage

I am trying to use a Google Cloud Function in Node.js to download a file from the WordPress repo and then send the file into a Google Cloud bucket. I have the WordPress file downloading, but it fails to write to the Google bucket.
function writeToBucket(jsonObject){
  /*
   * Google API authentication
   */
  var gcs = require('@google-cloud/storage')({
    projectId: 'wp-media-cdn',
    keyFilename: 'wp-media-cdn-d9d7c61bfad9.json'
  });

  /*
   * rename image file with image size, format: size X size imgName
   */
  var pluginUrl = "https://downloads.wordpress.org/plugin/bbpress.2.5.14.zip";
  newPluginName = "bbpress";

  /*
   * Read image into stream, upload image to bucket
   */
  var request = require('request');
  var fs = require('fs'); //used for createWriteString()
  var myBucket = gcs.bucket('test_buckyy'); //PUT BUCKET NAME HERE
  var file = myBucket.file(nnewPluginName);

  // file.exists() returns true if file already in bucket, then returns file url, exits function
  if(file.exists()){
    return 'https://storage.googleapis.com/${test_buckyy}/${file}';
  }

  //pipes image data into fileStream
  var fileStream = myBucket.file(newImageName).createWriteStream();
  request(imgUrl).pipe(fileStream)
    .on('error', function(err) {
      console.log('upload failed');
    })
    .on('finish', function() {
      console.log('file uploaded');
    });

  /*
   * return image url
   * use getSignedUrl
   */
  return 'https://storage.googleapis.com/${test_buckyy}/${file}';
}
I just replicated your use case and successfully downloaded the file into the temporary folder of a Cloud Function, then copied it from there into a bucket.
In order to achieve this, I downloaded the file using createWriteStream into the /tmp folder, since it is the only folder where we can store files in a Cloud Function, as stated in the Cloud Functions Execution Environment documentation.
After that, I just copied the file to a bucket following this Cloud Storage Uploading Objects documentation.
You can take a look at my sample function:
Index.js
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
const http = require('http');
const fs = require('fs');

exports.writeToBucket = (req, res) => {
  // Download the file into the function's writable /tmp disk
  const file = fs.createWriteStream("/tmp/yourfile.jpg");
  http.get("YOUR_URL_TO_DOWNLOAD_A_FILE", function(response) {
    response.pipe(file);
    file.on('finish', () => {
      console.log('file downloaded');
      // Creates a client
      const storage = new Storage();
      const bucketName = 'YOUR_BUCKET_NAME';
      const filename = '/tmp/yourfile.jpg';
      // Uploads the local file to the bucket, then responds
      storage.bucket(bucketName).upload(filename, {
        gzip: true,
        metadata: {
          cacheControl: 'no-cache',
        },
      }).then(() => {
        res.status(200).send(`${filename} uploaded to ${bucketName}.`);
      });
    });
  });
};
package.json
{
  "name": "sample-http",
  "version": "0.0.1",
  "dependencies": {
    "@google-cloud/storage": "^3.0.3"
  }
}
Using Chris32's answer, I've created a similar version that avoids downloading the image to the tmp folder. Hope it's useful!
'use strict';
const http = require('http');
const {Storage} = require('@google-cloud/storage');

exports.http = (request, response) => {
  const imageUrl = request.body.url;
  const fileName = imageUrl.substring(imageUrl.lastIndexOf('/') + 1);
  const storage = new Storage({keyFilename: "keyfile.json"});
  const bucket = storage.bucket('MY_BUCKET_NAME');
  const file = bucket.file(fileName);
  console.log('Uploading image');
  http.get(imageUrl, function(res) {
    res.pipe(
      file.createWriteStream({
        resumable: false,
        public: true,
        metadata: {
          contentType: res.headers["content-type"]
        }
      })
    )
      .on('finish', () => {
        // Respond only once the write stream has finished uploading
        console.log('Image uploaded');
        response.status(201).send('Image successfully uploaded!');
      })
      .on('error', (err) => {
        console.error(err);
        response.status(500).send(err.message);
      });
  });
};

exports.event = (event, callback) => {
  callback();
};
