I'm new to uploading files. I want to upload images of different products into Firebase Storage, plus another file that is required in the app. One product can have many images, so I want to create a folder for every product, where the name of the folder is the id of the product.
In code: I use the @google-cloud/storage library to upload files to Firebase Storage, but searching the documentation I found no way to create a folder and then upload files into it.
Here is my code:
I created a multer middleware, with a file-type check, to pass into an endpoint.
const express = require("express");
const Multer = require("multer");
const { Storage } = require("@google-cloud/storage");

const storage = new Storage({
  projectId: process.env.PROJECT_FIREBASE_ID,
  keyFilename: "hawat-service.json",
});
const bucket = storage.bucket(process.env.BUCKET_NAME);

const multer = Multer({
  storage: Multer.memoryStorage(),
  fileFilter: (req, file, cb) => {
    checkFileType(req, file, cb);
  }
});

const checkFileType = (req, file, cb) => {
  if (file.fieldname == 'cover' || file.fieldname == 'images') {
    if (!file.originalname.match(/\.(jpg|JPG|jpeg|JPEG|png|PNG|gif|GIF)$/)) {
      req.error = new Error("Only images are allowed");
      return cb(null, false);
    }
  } else if (file.fieldname == 'card' || file.fieldname == 'licence') {
    if (!file.originalname.match(/\.(pdf|jpg|JPG|jpeg|JPEG|png|PNG|gif|GIF)$/)) {
      req.error = new Error("Only images and pdf are allowed");
      return cb(null, false);
    }
  }
  return cb(null, true);
}

module.exports = (req, res, next) => {
  return multer.fields([
    { name: 'cover', maxCount: 1 },
    { name: 'images', maxCount: 5 },
    { name: 'card', maxCount: 1 },
    { name: 'licence', maxCount: 1 }
  ])(req, res, () => {
    if (req.error) return res.status(400).send({ message: req.error.message });
    next();
  });
}
The function that uploads a file is:
const express = require("express");
const Multer = require("multer");
const { Storage } = require("@google-cloud/storage");

const storage = new Storage({
  projectId: process.env.PROJECT_FIREBASE_ID,
  keyFilename: "hawat-service.json",
});
const bucket = storage.bucket(process.env.BUCKET_NAME);

module.exports = {
  upload: async (file) => {
    return new Promise((resolve, reject) => {
      let newFileName = `${file.originalname}_${Date.now()}`;
      let fileUpload = bucket.file(newFileName);
      const createStream = fileUpload.createWriteStream({
        metadata: {
          contentType: file.mimetype
        }
      });
      createStream.on('error', (error) => {
        console.log("error in uploading is", error);
        reject('Something is wrong! Unable to upload at the moment.');
      });
      createStream.on('finish', () => {
        // The public URL can be used to directly access the file via HTTP.
        const url = `https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`;
        // storage.bucket(process.env.BUCKET_NAME).file(fileUpload.name).makePublic();
        resolve(url);
      });
      createStream.end(file.buffer);
    });
  }
}
The endpoint is:
router.post('/add-product' , auth, multer , seller.onAddProduct)
The function onAddProduct is the handler that receives multiple files from the user.
So how can I create a folder for every product and then upload files into that folder?
Also, how can I delete the folder after creating it?
I am not using the same method you are using, but you could use my solution as a case study:
await storage.bucket(bucketName).upload(filename, {
  destination: "{Foldername}/{Filename}",
})
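For instance, with the product-id naming from the question, the snippet could look like this (bucketName, the product id, and the local file name are hypothetical placeholders):
// the "/" in the destination is what creates the folder-like structure
await storage.bucket(bucketName).upload("./cover.jpg", {
  destination: "product123/cover.jpg",
});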
Folders in Google Cloud Storage are not really a thing. As you can see in this documentation:
gsutil provides the illusion of a hierarchical file tree atop the "flat" name space supported by the Cloud Storage service. To the service, the object gs://your-bucket/abc/def.txt is just an object that happens to have "/" characters in its name. There is no "abc" directory, just a single object with the given name.
So what you see as a folder in Cloud Storage is simply another object that emulates a folder structure; what really matters are the object paths.
In your case there are 2 ways you can go about what you want to do. You can either:
Create an emulated empty directory by creating an object that ends in a trailing slash. For example, to create a subdirectory called foo at the root of a bucket, you would create an empty object (size 0) called foo/ and then upload the file with its full path.
Simply upload the file with its full path, including the desired "subdirectory"; when you fetch it from GCS it will look like it is located in the emulated directory.
Personally I would use the latter, as you will achieve the same result with only 1 step instead of 2.
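A minimal sketch of that second option, adapted to the stream-based upload function from the question (the productId parameter is an assumption, not part of the original code):
upload: async (file, productId) => {
  return new Promise((resolve, reject) => {
    // the "/" in the object name is what emulates a per-product folder
    const newFileName = `${productId}/${file.originalname}_${Date.now()}`;
    const fileUpload = bucket.file(newFileName);
    const stream = fileUpload.createWriteStream({
      metadata: { contentType: file.mimetype },
    });
    stream.on('error', reject);
    stream.on('finish', () => {
      resolve(`https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`);
    });
    stream.end(file.buffer);
  });
}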
If you want to create an empty folder in Cloud Storage, you can do this:
const userId = "your_user_id"
// Folder name. Notice the slash at the end of the path
const folderName = `users/${userId}/`;
// Create a folder
await bucket.file(folderName).save("");
After creating the new folder, you can upload your file there by setting its destination:
const destination = `${folderName}${fileName}`;
await bucket.upload(file, {
  destination,
})
But actually you don't need to create the folder as a separate step; you can just set the full destination for your file in bucket.upload(...) as described above.
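As for deleting a "folder" afterwards: since folders are only name prefixes, deleting one means deleting every object that shares the prefix. A sketch using the library's deleteFiles helper (the products/${productId}/ prefix is an assumed naming scheme, not from the original code):
// deletes every object whose name starts with the prefix, i.e. the whole emulated folder
await bucket.deleteFiles({ prefix: `products/${productId}/` });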
[folder structure image]
Multer.js file:
const multer = require("multer");
const path = require("path");
const fs = require("fs");
const httpStatus = require("http-status");
const ApiError = require("../utils/ApiError");
const logger = require("../utils/logger");

const multerUpload = async (req, res, next) => {
  let fileName = "";
  let storage = multer.diskStorage({
    destination: function (req, file, callback) {
      fs.mkdir(path.join(path.resolve(), "/tmp"), (err) => {
        if (err) {
          logger.error("mkdir tmp %o", err);
        }
        callback(null, path.join(path.resolve(), "/tmp"));
      });
    },
    filename: function (req, file, callback) {
      fileName = file.fieldname + "-" + req.query.eventId + Date.now() + path.extname(file.originalname);
      logger.info("filename of uploadSheet===> %s", fileName);
      callback(null, fileName);
    },
  });

  // below code is to read the added data to DB from file
  var upload = multer({
    storage: storage,
    fileFilter: function (req, file, callback) {
      var ext = path.extname(file.originalname);
      if (ext !== '.xlsx') {
        return callback(new Error('Only Excel sheets are allowed'));
      }
      callback(null, true);
    },
  }).single("sheet");

  upload(req, res, async function (err) {
    if (err) {
      next(new ApiError(httpStatus.INTERNAL_SERVER_ERROR, err.message));
    } else {
      req.fileName = fileName;
      next();
    }
  });
}

module.exports = multerUpload;
It gives an EROFS (read-only file system) error in Vercel production, but the code works fine locally.
I'm trying to upload an Excel sheet via the API, read the data from it, and add it to MongoDB.
I once encountered this same problem working with Heroku a long time ago. I haven't worked with Vercel, but after some quick research I will say this is the cause: Vercel does not provide storage for you to upload files to in production. You need a separate service for that, like Amazon S3, but Azure File Storage and Google Cloud Storage also exist.
Alternatively, if you don't want to add more services to your project, you can convert the image to a base64 string and save it as text (but make the field/column read-only so it does not get corrupted). NOT the best alternative, but it was something I once did.
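A minimal sketch of that base64 round trip, assuming multer memory storage so the uploaded file is available as req.file.buffer:
// encode the uploaded file as text, e.g. to store it in a DB column
const asText = req.file.buffer.toString("base64");
// decode it back to binary when the file has to be served again
const original = Buffer.from(asText, "base64");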
To use /tmp in serverless functions, you should just use /tmp/your-file. Remove path.resolve().
If you only need to store something temporarily, you may try the /tmp directory.
Limit: 512 MB, with no guarantees - https://github.com/vercel/vercel/discussions/5320
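A minimal sketch of the suggested fix, writing straight to /tmp without path.resolve() (the file-naming scheme is illustrative):
const storage = multer.diskStorage({
  // /tmp already exists in the serverless environment, so no mkdir is needed
  destination: (req, file, callback) => callback(null, "/tmp"),
  filename: (req, file, callback) =>
    callback(null, file.fieldname + "-" + Date.now() + path.extname(file.originalname)),
});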
How can I remove a file from a Google Storage bucket using MulterGoogleStorage and NestJS? I can't find any examples or docs.
I have the following storage for uploading files:
const storage = new MulterGoogleStorage({
  projectId: 'myprojectId',
  keyFilename: path.join(__dirname, '../../../mykeyfile.json'),
  bucket: 'mybucketname',
  filename: (req: Request, file, cb) => {
    let dir = '';
    const filePath = file.originalname.split('/');
    if (filePath.length > 1) {
      dir = `${filePath[0]}/`;
    }
    const fileExt = file.originalname.split('.').pop();
    cb(null, `${dir}${Date.now()}.${fileExt}`);
  }
});
You can create something like the following, which iterates over an array containing all the object names and deletes each one.
This uses the delete function from the Google Cloud Storage documentation.
const storage = new Storage({ keyFilename: 'google-credentials.json' });
const imagesToDelete = ['fileName1', 'fileName2', 'fileName3'];

imagesToDelete.map(async (image) => {
  await storage
    .bucket('yourbucketName')
    .file(image)
    .delete();
});
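If you need to wait for all deletions to finish (and surface any failure), you could wrap the mapped promises in Promise.all, for example:
// resolves once every file has been deleted; rejects if any deletion fails
await Promise.all(
  imagesToDelete.map((image) =>
    storage.bucket('yourbucketName').file(image).delete()
  )
);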
I have a requirement to upload multiple images in Node.js. I am using Angular 2 for the frontend and FormData to send the files to the Node side.
Below is the code:
public setAnswerImage(obj: any, files: any): Promise<any> {
  try {
    let urlDetails = obj["id"] ? ANSWER_URLS['updateimages'] : ANSWER_URLS['insertimages'];
    let formData = new FormData();
    Object.keys(obj).forEach(key => {
      formData.append(key, obj[key]);
    });
    console.log(formData);
    for (let i = 0; i < files.length; i++) {
      formData.append("uploads[]", files[i], files[i]['name']);
    }
    console.log(formData);
    return this.restUtils.post(urlDetails, formData, { "id": obj["id"] });
  }
  catch (err) {
    return Promise.reject(err);
  }
}
let urlDetails = obj["id"] ? ANSWER_URLS['updateimages'] : ANSWER_URLS['insertimages'];
If obj contains an id I call the update method, otherwise the insert method.
export const ANSWER_URLS = {
  insertimages: { url: "/api/answer/response/insertimages" }
}
On the Node side I am using multer disk storage. I have created a temp folder and set multer's path to it, as shown below.
router.post("/response/insertimages", upload.array("uploads[]", 12), (req: Request, res: Response, next: NextFunction) => {
new AnswerRoute().saveAnswerImages(req, res, next);
});
var storage = multer.diskStorage({
destination: (req: Request, file, cb) => {
cb(null, video_path.dest + 'temp//');
}, filename: (req:Request, file, cb) => {
let name = file.originalname.split('.');
console.log(name,"mnae")
cb(null, new Date().getTime().toString() + '.' + name[name.length-1]);
}
})
var upload = multer({ storage: storage });
The console.log output shows the multiple files I uploaded, but in the temp folder I can see only one:
[ 'doctor', 'jpg' ] 'mnae'
[ 'Doctorvisits', 'png' ] 'mnae'
The above output shows the two images I uploaded, but only one file is saved in the temp folder.
I am using upload.array("uploads[]", 12), where uploads contains multiple files, but when I upload multiple files only the first one ends up in the temp folder.
Am I going wrong somewhere? Please help me.
Is it possible to compress multiple files in Firebase Storage using Cloud Functions?
For example, users upload 5 images, and a Firebase Cloud Function creates a zip file of these 5 images.
I could not find an end-to-end guide for a similar scenario in Functions myself, so I had to combine solutions for zipping, accessing files in Cloud Storage, etc. See the result below:
import * as functions from 'firebase-functions';
import admin from 'firebase-admin';
import archiver from 'archiver';
import { v4 as uuidv4 } from 'uuid';

export const createZip = functions.https.onCall(async () => {
  const storage = admin.storage();
  const bucket = storage.bucket('bucket-name');

  // generate a random name for the zip file
  const filePath = uuidv4();
  const file = bucket.file(filePath);
  const outputStreamBuffer = file.createWriteStream({
    gzip: true,
    contentType: 'application/zip',
  });

  const archive = archiver('zip', {
    gzip: true,
    zlib: { level: 9 },
  });
  archive.on('error', (err) => {
    throw err;
  });
  archive.pipe(outputStreamBuffer);

  // use Firestore, request data etc. to get file names and their full path in storage
  // file path can not start with '/'
  const userFilePath = 'user-file-path';
  const userFileName = 'user-file-name';
  const userFile = await bucket.file(userFilePath).download();
  archive.append(userFile[0], {
    // if you want a directory structure inside the zip file, add a prefix to the name -> 'folder/' + userFileName
    name: userFileName,
  });

  archive.on('finish', async () => {
    console.log('uploaded zip', filePath);
    // get a URL to download the zip file
    await bucket
      .file(filePath)
      .getSignedUrl({ expires: '03-09-2491', action: 'read' })
      .then((signedUrls) => console.log(signedUrls[0]));
  });

  await archive.finalize();
});
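For completeness, a sketch of invoking this callable from a web client with the Firebase JS SDK v9 (the function name createZip comes from the export above; everything else is standard SDK usage):
import { getFunctions, httpsCallable } from 'firebase/functions';

const createZip = httpsCallable(getFunctions(), 'createZip');
await createZip(); // in this example the signed download URL is only logged server-side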
I am a bit overwhelmed by handling file uploads using multer. What I want to do is:
Uploading one or multiple files via ajax
Storing these files in a dynamic destination folder
Processing the contents of files with a specific format (.json/.xml/.txt)
1) is already done; I managed to store these files inside a tmp folder.
2) As far as I understand, I need to store these files first and then move them to my desired location (which is a dynamic location).
3) I want to parse the files if possible; otherwise the files should just be stored as assets.
My questions:
How can I store (or move) the files at a dynamic target?
How can I process the file contents (for example to store the parsed contents in a database)?
My code:
var storage = multer.diskStorage({
  destination: function (req, file, cb) {
    fs.mkdirsSync(config.fileHandler.uploadTempDir);
    cb(null, config.fileHandler.uploadTempDir);
  },
  filename: function (req, file, cb) {
    cb(null, Date.now() + '-' + file.originalname);
  }
})

const upload = multer({ storage: storage });
var stringsUpload = upload.fields([{ name: 'sourceStrings[]', maxCount: 8 }]);

router.post('/:projectId/update/sourcestrings', isLoggedIn, stringsUpload, processSourceStrings);

function processSourceStrings(req, res, next) {
  var projectId = req.params.projectId;
  Project.findOne({ project_id: projectId }).populate('owner').then(function (project) {
    if (!project)
      return res.send(404, { error: "Couldn't find a project with this id" });
    if (!isAllowedToEditProject(req, project))
      return res.send(403, { error: "You are not allowed to edit this project" });

    // Move files to the right destination
    var targetpath = 'user/1/project/4/';

    // Process file
    return res.status(200).send();
  })
}