Destination not being set while uploading files using multer and gcloud - node.js

I am trying to upload files in a specific folder in my google storage bucket. I wrote my code using Google documentations
app.js
// Multer middleware: buffer uploads in memory (no temp files) so the
// bytes can be streamed straight to Cloud Storage; reject files > 5 MB.
const multerMid = multer({
storage: multer.memoryStorage(),
limits: {
// no larger than 5mb.
fileSize: 5 * 1024 * 1024,
},
});
// Hide the default Express fingerprint header.
app.disable("x-powered-by");
// Parse multipart bodies: one "avatar" file and up to eight "gallery"
// files per request; parsed files land on req.files.
app.use(
multerMid.fields([
{ name: "avatar", maxCount: 1 },
{ name: "gallery", maxCount: 8 },
])
);
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
// POST /uploads — forward the first "gallery" file to uploadImage and
// reply with its public URL; delegate any failure to the error handler.
app.post("/uploads", async (req, res, next) => {
  try {
    const uploadedFiles = req.files;
    console.log(uploadedFiles);
    const [galleryFile] = uploadedFiles.gallery;
    const imageUrl = await uploadImage(galleryFile);
    res.status(200).json({
      message: "Upload was successful",
      data: imageUrl,
    });
  } catch (error) {
    next(error);
  }
});
uploadimage function
// Uploads an in-memory Multer file to the `documents/` folder of the
// bucket and resolves with its public URL.
// FIX: createWriteStream has no `destination` option (it was silently
// ignored) — in GCS the object's name IS its full path, so the target
// folder must be encoded in the file name itself.
const uploadImage = (file) =>
  new Promise((resolve, reject) => {
    const { originalname, buffer } = file;
    const blob = bucket.file(`documents/${originalname.replace(/\s+/g, "_")}`);
    const blobStream = blob.createWriteStream({
      resumable: false,
    });
    blobStream
      .on("finish", () => {
        const publicUrl = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
        resolve(publicUrl);
      })
      .on("error", () => {
        // Reject with an Error object (not a bare string) so stack
        // traces survive to the Express error handler.
        reject(new Error(`Unable to upload image, something went wrong`));
      })
      .end(buffer);
  });
Here, i am setting the destination as documents/image.png but the file is still storing in the bucket root and not in the documents folder. How can i make the files get stored in a specific folder in bucket?

createWriteStream has no property named destination on its configuration option!
https://googleapis.dev/nodejs/storage/latest/global.html#CreateWriteStreamOptions
There is the uploadOptions object:
https://googleapis.dev/nodejs/storage/latest/global.html#UploadOptions
Solution: You should use the upload convenience method (which wraps createWriteStream) and pass the uploadOptions object to it, which has a destination property!
https://googleapis.dev/nodejs/storage/latest/Bucket.html#upload

I have figured out the way to store a file in a particular subdirectory of a Google Storage bucket. Google stores a file in its bucket under a name that contains its path. For example, if an image named xyz.jpg is stored in the abc folder of the mnp bucket, the full object path will be mnp/abc/xyz.jpg. So, to save a file in a particular directory, we just have to add the path to the original file name.
// Stores the file under the `document/` prefix; in GCS the object name
// carries the full path, so prefixing the name "creates" the folder.
// FIX: `originalname.replace(originalname, "document/" + originalname)`
// was a roundabout, fragile way of prepending the prefix — a template
// literal does the same thing directly.
const uploadImage = (file) =>
  new Promise((resolve, reject) => {
    const { originalname, buffer } = file;
    const blob = bucket.file(`document/${originalname}`);
    const blobStream = blob.createWriteStream({
      resumable: false,
    });
    blobStream
      .on("finish", () => {
        const publicUrl = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
        resolve(publicUrl);
      })
      .on("error", () => {
        // Reject with an Error object so callers get a proper stack.
        reject(new Error(`Unable to upload image, something went wrong`));
      })
      .end(buffer);
  });

Related

How to use Multer middleware to upload array of images

Im trying to use Multer to upload an array of images. At the client side i have a FormData called pictures.
pictures array, from react-native-image-picker:
// Pictures selected so far; each entry records uri, name, type and index.
const [pictures, setPictures] = useState([]);

// Callback for react-native-image-picker: append the newly picked image
// to the list and clear the loading flag.
const imagePickerCallBack = data => {
  const nextIndex = pictures.length;
  const newImage = {
    image: data.uri,
    fileName: data.fileName,
    type: data.type,
    index: nextIndex,
  };
  setPictures([...pictures, newImage]);
  setLoad(false);
};
Step 1 - Create formData with all images:
// Build one multipart body, appending each picture object under the
// same "pictures" field name.
const data = new FormData();
pictures.forEach(pic => {
data.append('pictures', {
fileName: pic.fileName,
uri: pic.image,
type: pic.type,
});
});
const headers = {
'Content-Type': 'multipart/form-data',
'x-access-token': token,
};
// NOTE(review): the request sends `body`, but the FormData built above
// is named `data` — presumably these should be the same object; verify.
const diaryUpdatePost = await post(`diary/uploadPictures/${diary}`, body, {
headers,
});
Step 2 - Get the request at server side. Im setting up multer and routers:
const router = express.Router();
const multer = require('multer');
// Disk storage: save uploads under uploads/ named "<field>-<timestamp>".
const storage = multer.diskStorage({
destination(req, file, cb) {
cb(null, 'uploads/');
},
filename(req, file, cb) {
// NOTE(review): no extension is appended — files land on disk without one.
cb(null, `${file.fieldname}-${Date.now()}`);
},
});
// NOTE(review): `fieldSize` caps non-file text fields; `fileSize` is the
// option that limits uploaded files — confirm which limit was intended.
const upload = multer({ storage, limits: { fieldSize: 25 * 1024 * 1024 } });
// Multer with the same FormData (client)
router.post('/uploadPictures/:name', upload.array('pictures'), diaryController.uploadDiaryPictures);
And finally my diaryController, where i need to get all files:
// Controller invoked after Multer has parsed the multipart body;
// uploaded files (if any) arrive on req.files.
exports.uploadDiaryPictures = async (req, res) => {
// Logging []. I cant access files from here
console.log(`files ${req.files}...`);
// NOTE(review): no response is sent, so the client request will hang.
};
I already tried to use express-fileupload, but req.files returns undefined. Any ideas to help? Thanks.
You need to give a count of files you expect to upload:
upload.array('pictures', <number_of_pictures>)
Or if it is allowed to be any number:
upload.any('pictures')
You should also add the file itself to your form data
data.append('pictures', {
name: pic.fileName,
file: pic.image,
type: pic.type,
});
None of the answers here helped. The solution for me was to iteratively append EACH file object from the files array to the same field name given in Multer, instead of appending the files array itself to the field name given in Multer.
So from this:
// "Before" example (intentionally broken): the whole files array is
// appended as ONE FormData value, so Multer's "files" field on the
// server receives no individual file entries.
export const addFiles= createAsyncThunk(
"addFiles",
async (payload: any, thunkApi) => {
const formData = new FormData();
// Here was the problem -- I was appending the array itself
// to the "files" field
formData.append("files", payload.files);
formData.append("data", JSON.stringify(payload?.data || {}));
const response = await axios.post('/user/products/files', formData);
if(response){
return response;
}
return thunkApi.rejectWithValue("");
}
);
I did this:
// Working version: each file is appended individually under the same
// "files" field, which is what Multer's .array("files") expects.
export const addFiles = createAsyncThunk(
  "addFiles",
  async (payload: any, thunkApi) => {
    const formData = new FormData();
    // One append per file — never append the array itself.
    for (const file of payload.files) {
      formData.append("files", file);
    }
    formData.append("data", JSON.stringify(payload?.data || {}));
    const response = await axios.post('/user/products/files', formData);
    if (response) {
      return response;
    }
    return thunkApi.rejectWithValue("");
  }
);
This was my Multer configuration:
multer({ dest: "/uploads" }).array("files")
The files posted to my endpoint were then available to me at:
req.files
PS: Although the accepted answer kind of did that, he did not mention that you cannot append the entire array at once, which was the main problem for me.

How to upload multiple files in nodejs to AWS S3 and save file url into database?

Hi i need to upload multiple images at a time on s3.
Currently i am using express-fileupload to upload single image on AWS, and i want to use same approach to make it upload multiple files to s3 and update images array with urls on mongodb.
My schema property:
// Service documents keep an array of photo names/URLs; each entry
// defaults to a placeholder image.
const ServiceSchema = new mongoose.Schema(
{
photo: [
{
type: String,
default: 'no-photo.jpg',
},
],
});
module.exports = mongoose.model('Service', ServiceSchema);
My Controller:
// #desc Upload photo for service
// #route PUT /api/v1/services/:id/photo
// #access Private
// Validates ownership, file type and size, then uploads the image to S3
// and stores the resulting URL on the Service document.
exports.servicePhotoUpload = asyncHandler(async (req, res, next) => {
const service = await Service.findById(req.params.id);
if (!service) {
return next(new ErrorResponse(`Service not found with id of ${req.params.id}`, 404));
}
// Make sure user adding service is business owner
if (service.user.toString() !== req.user.id && req.user.role !== 'admin') {
return next(
new ErrorResponse(
`User ${req.user.id} is not authorized to update this service to business ${service._id}`,
401
)
);
}
// File Upload validation
if (!req.files) {
return next(new ErrorResponse(`Please upload a file.`, 400));
}
const file = req.files.file;
// Make sure it is a valid image file
if (!file.mimetype.startsWith('image')) {
return next(new ErrorResponse(`Please upload a valid image file.`, 400));
}
//Check File Size
if (file.size > process.env.MAX_FILE_UPLOAD) {
return next(
new ErrorResponse(
`Please upload an image less then ${process.env.MAX_FILE_UPLOAD / 1024}KB in size.`,
400
)
);
}
// Create custom filename
file.name = `service-uploads/servicePhoto_${service._id}${path.parse(file.name).ext}`;
// Upload, then persist the returned S3 URL on the Service document.
// NOTE(review): the schema declares `photo` as an array of strings, but
// a single string is written here — confirm which shape is intended.
uploadToS3({
fileData: req.files.file.data,
fileName: file.name,
})
.then(async (result) => {
console.log('Success Result: ', result);
await Service.findByIdAndUpdate(service._id, { photo: result.Location });
return res
.status(200)
.json({ success: true, message: 'Service photo added successfully', url: result.Location });
})
.catch((err) => {
console.log(err);
return next(new ErrorResponse('Failed to upload file to S3', 500));
});
});
My Utility File to upload File to S3:
const AWS = require('aws-sdk');

/**
 * Uploads a file buffer to the "toolbox-uploads" S3 bucket.
 * @param {{fileName: string, fileData: Buffer}} options - key and contents.
 * @returns {Promise} resolves with the S3 result (incl. `Location` URL).
 */
const uploadToS3 = ({ fileName, fileData }) => {
  // Refresh the global AWS config from the environment on every call.
  AWS.config.update({
    accessKeyId: process.env.AWS_S3_ACCESS_KEY,
    secretAccessKey: process.env.AWS_S3_SECRET_KEY,
    region: 'us-east-2',
  });
  // S3 service object pinned to the 2006-03-01 API version.
  const s3 = new AWS.S3({ apiVersion: '2006-03-01' });
  // Return the upload as a promise so the caller can handle the URL.
  return s3
    .upload({
      Bucket: 'toolbox-uploads',
      Key: fileName, // File name you want to save as in S3
      Body: fileData,
    })
    .promise();
};
module.exports = uploadToS3;
My Router:
const express = require('express');
const {
  servicePhotoUpload
} = require('../controllers/service');
const Service = require('../models/Service');

// BUG FIX: `router` was used below without ever being created.
const router = express.Router();

// NOTE(review): `protect` and `authorize` are not imported in this
// snippet — confirm they come from the auth middleware module.
router.route('/:id/photo').put(protect, authorize('publisher', 'business', 'admin'), servicePhotoUpload);

module.exports = router;
The above code is working 100%.
I am bit confused as there were different approach and none worked for me from google and stack overflow and none of them is getting return url and saving into database.
I want to make a separate utility file to upload multiple files to S3, the same as I did for single files, so I can use it anywhere. That file should return the uploaded URLs so I can update my database.
I have tried multer-s3 but no solution works for me.
This approach might be different for you but that is how I was able to resolve the same issue.
First you'll need
Multer
multer-s3
aws-sdk
I made a FileUpload class that handles both single and multi-upload (I also needed to be able to upload pdf and video files) and this is the code in my constructor, note that I also specified the s3-bucket in question from aws.
// S3 client configured from environment credentials; the Bucket set
// here is the default target for this uploader instance.
this.s3 = new AWS.S3({
accessKeyId: process.env.S3_ACCESS_KEY_ID,
secretAccessKey: process.env.S3_SECRET_KEY,
Bucket: 'name_of_s3_bucket',
});
I created a method called upload in the class. Code below
upload(path, type) {
let ext = 'jpeg';
const multerFilter = (req, file, cb) => {
if (type === 'image') {
if (file.mimetype.startsWith(this.type)) {
cb(null, true);
} else {
cb(
new AppError(
'Not an Image! Please upload only images',
400
),
false
);
}
} else if (type === 'pdf') {
ext = 'pdf';
const isPdf = file.mimetype.split('/')[1];
if (isPdf.startsWith(this.type)) {
cb(null, true);
} else {
cb(
new AppError('Not a pdf! Please upload only pdf', 400),
false
);
}
}
};
const upload = multer({
storage: multers3({
acl: 'public-read',
s3: this.s3,
bucket: 'name_of_s3_bucket',
metadata: function (req, file, cb) {
cb(null, { fieldName: file.fieldname });
},
key: function (req, file, cb) {
let filename = `user-${
req.user.id
}/${path}/${uuid.v4()}-${Date.now()}.${ext}`;
// eslint-disable-next-line camelcase
const paths_with_sub_folders = [
'auditions',
'biography',
'movies',
];
if (paths_with_sub_folders.includes(path)) {
filename = `user-${req.user.id}/${path}/${
req.params.id
}/${uuid.v4()}-${Date.now()}.${ext}`;
}
cb(null, filename);
},
}),
fileFilter: multerFilter,
limits: {
fileSize: 5000000,
},
});
return upload;
}
To consume the above, I import the class into any controller that I needed an upload feature and called the following.
Side Note : Ignore the paths code (It was just a way to generate unique file name for the files)
const upload = new FileUpload('image').upload('profile-images', 'image');
exports.uploadUserPhoto = upload.array('photos', 10);
I then used the uploadUserPhoto as a middleware before calling the following
exports.addToDB = catchAsync(async (req, res, next) => {
if (!req.files) return next();
req.body.photos = [];
Promise.all(
req.files.map(async (file, i) => {
req.body.photos.push(file.key);
})
);
next();
});
On a high-level overview, this is the flow, First, upload your photos to s3 and get the req.files, then look through that req.files object passing them into an array field on your req object then finally save them on your DB.
NOTE: You must promisify the req.files loop since the task is asynchronous
My final router looked like this
router
.route('/:id')
.put(uploadUserPhoto, addToDB, updateProfile)
Item.js
Your model can have a field called images thats type array.
const mongoose = require("mongoose");
// Item stores an untyped array of uploaded-image keys.
const ItemSchema = mongoose.Schema({
images: {
type: [],
},
});
module.exports = mongoose.model("Items", ItemSchema);
You map through the array of object and only extract the data you want to store, in this example it is the key which is the unique name given to every image thats uploaded.
route.js
// POST / — after multer-s3 uploads, persist each file's S3 key on a new
// Item document and report the outcome.
router.post("/", verify, upload.array("image"), async (req, res) => {
  const { files } = req;
  // Collect the S3 object key of every uploaded file.
  const images = files.map((file) => file.key);
  try {
    // FIX: await the save — the original fired it without awaiting, so a
    // rejected promise escaped the try/catch and the 200 was sent before
    // the write completed.
    await new Item({
      images,
    }).save();
    res.status(200).send({message: "saved images to db"})
  }catch(err){
    res.status(400).send({message: err})
  }
});
Let me know if this does what you wanted

Multer: How to have the filename same with the one save in database

Right now I'm able to basically save just the name or originalname of the image file in both:-
database (using mongoose) and,
image folder
But this way of doing it has a flaw when trying to delete an image. For example, suppose I upload the same image twice. Multer will not copy the new image (the same image) again into the destination location or folder (/upload). So if later on I try to delete a data entry from:- (bear in mind that right now I have two data entries that share the same image name)
database (using mongoose)
delete the image affiliate with the data from folder /upload
The other data that use the same image will not have one anymore.
So, I've tried putting new Date().toISOString().replace(/[-T:\.Z]/g, "") on the filename on both the image saved in local folder /upload and document in mongoDb. But obviously that won't work cause the generated date slightly different for both case.
Below is my current code:-
Multer.js
// Mutler
const multer = require('multer')
// Path
const path = require('path')
// File Remove
const fileRemove = require('fs')
const {
// File Base FOlder Location
// Defaults to one level above this file unless set in the environment.
FILE_BASE_FOLDER_LOCATION = path.resolve(__dirname + '/', '../'),
// Image Folder Location
IMAGE_FOLDER_LOCATION = FILE_BASE_FOLDER_LOCATION + '/upload/',
} = process.env
// storage img
// Disk storage: write to IMAGE_FOLDER_LOCATION, prefixing each name with
// a compact timestamp so repeated uploads of the same file don't collide.
const storageImgFile = multer.diskStorage({
destination: (req, file, cb) => {
cb(null, IMAGE_FOLDER_LOCATION)
},
filename: (req, file, cb) => {
// renaming the image file to have the 'date' and the original file name
// NOTE(review): any code that re-generates this timestamp elsewhere
// will not match this name — reuse req.file.filename instead.
cb(null, new Date().toISOString().replace(/[-T:\.Z]/g, "") + '-' + file.originalname)
}
})
// filter img types
// Accept only png/jpg/jpeg uploads; reject everything else.
const filterImgFile = (req, file, cb) => {
const fileTypes = ['image/png', 'image/jpg', 'image/jpeg']
if(fileTypes.includes(file.mimetype)) cb(null, true)
// FIX: pass an Error object, not a bare string — Multer forwards the
// first callback argument as the error, and downstream Express error
// middleware expects a stack-carrying Error.
else cb(new Error('Only .png .jpg and .jpeg format allowed!'), false)
}
// Img FIle Upload Middleware
const uploadImgFile = multer({
storage: storageImgFile,
// FIX: the multer option is `fileFilter`; the original passed an
// unknown `filterImgFile` key, so the type filter was silently ignored.
fileFilter: filterImgFile
// limits: { fieldSize: 10000000000 }
})
// Img Removing Handler
// Deletes the named image from the upload folder; on failure it sends a
// 500 response itself. NOTE(review): a caller that also responds after
// invoking this can trigger a double send — consider surfacing the error
// to the caller instead of responding here.
const handleImgRemove = (res, imgName) => {
fileRemove.unlink(IMAGE_FOLDER_LOCATION + imgName, (err) => {
if(err) {
return res.status(500).json({
success: false,
error: `Failed at removing file from upload folder`,
data: err
})
}
})
}
module.exports = {
imgFolderLocation: IMAGE_FOLDER_LOCATION,
uploadImgFile,
handleImgRemove,
}
AddNewImage Route:-
// POST /add — store the uploaded image's metadata in MongoDB.
// FIX 1: persist req.file.filename — the exact name Multer wrote to
// disk — instead of re-computing a timestamp that can never match it.
// FIX 2: the original .then parameter was named `res`, shadowing the
// Express response and crashing on res.status(...).
router.post('/add', uploadImgFile.single('file'), async(req, res) => {
  let { desc } = req.body
  const image = new Image({
    imgName: req.file.filename,
    desc: desc
  });
  image.save()
    .then(saved => {
      return res.status(200).json({
        success: true,
        count: saved.length,
        data: saved
      })
    })
    .catch(err => {
      console.log(err)
      return res.status(500).json({
        success: false,
        error: `Failed to upload new image!`,
        data: err
      })
    })
})
DeleteImage Route:-
// DELETE /delete/:id — remove the Image document and its on-disk file.
router.delete('/delete/:id', async(req, res) => {
await Image.findByIdAndDelete(req.params.id)
.then(data => {
// - remove image from upload folder
// NOTE(review): if the id is unknown, `data` is null and data.imgName
// throws; also handleImgRemove may itself send a 500, after which the
// 200 below would be a double send. Worth guarding both cases.
handleImgRemove(res, data.imgName)
return res.status(200).json({
success: true,
count: data.length,
data: data
})
})
.catch(err => {
return res.status(500).json({
success: false,
error: `Failed to delete image from DB!`,
data: err
})
})
})
I'm hoping to get both (imgName saved in mongoDb) and filename saved in local destination folder /upload will be the same. But I can't. So how can I make sure that both of them will have the same naming system or ways?
const multer = require("multer");
const uuid = require("uuid").v4;
const path = require("path");
// Disk storage that gives every upload a collision-proof name:
// a fresh UUID plus the original file's extension.
const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, "uploads");
  },
  filename: (req, file, cb) => {
    const extension = path.extname(file.originalname);
    cb(null, `${uuid()}${extension}`);
  },
});

const upload = multer({ storage });
npm install uuid
You would need to give each image a unique name using uuid, you can do this by giving each image a filename like in the example. This will also slove the problem where the same file won't be stored twice. You can additionally add the current date to the filename to further reduce the chance of two files having the same name.
This is what the image object will look like
{
fieldname: 'file',
originalname: 'image1.jpg',
encoding: '7bit',
mimetype: 'image/jpeg',
destination: 'uploads',
filename: '31d41f39-b4ad-467c-bb09-601596240fe8.jpg',
path: 'uploads\\31d41f39-b4ad-467c-bb09-601596240fe8.jpg',
size: 961388
}

Upload screenshots to google cloud storage bucket with Fluent-ffmpeg

I am currently using multer to upload videos to my google storage bucket, and fluent-ffmpeg to capture thumbnails of the videos. Videos are being uploaded into the buckets correctly, but not the thumbnails from ffmpeg. How can I change the location of the thumbnails to my google storage bucket?
Back-End Video upload
require ('dotenv').config()
const express = require('express');
const router = express.Router();
const path = require('path');
const multer = require("multer");
var ffmpeg = require('fluent-ffmpeg');
const multerGoogleStorage = require('multer-google-storage');
const { Video } = require("../models/Video");
const {User} = require("../models/User")
const { auth } = require("../middleware/auth");
// Disk-storage engine + filter that accepts only common video extensions.
// FIXES: the original passed destination/filename straight to multer()
// (ignored — they belong on a storage engine), its
// `ext !== '.mp4' || ext !== '.mov' || …` chain was always true so every
// file was rejected, and it referenced `res`, which is not in scope in a
// fileFilter callback.
var storage = multer({
  storage: multer.diskStorage({
    destination: function (req, file, cb) {
      cb(null, 'videos/')
    },
    filename: function (req, file, cb) {
      cb(null, `${Date.now()}_${file.originalname}`)
    },
  }),
  fileFilter: (req, file, cb) => {
    const allowedExts = ['.mp4', '.mov', '.m3u', '.flv', '.avi', '.mkv']
    const ext = path.extname(file.originalname).toLowerCase()
    if (!allowedExts.includes(ext)) {
      return cb(new Error('Error only videos can be uploaded'), false)
    }
    cb(null, true)
  }
})
// Set location to google storage bucket
var upload = multer({ storage: multerGoogleStorage.storageEngine() }).single("file")

// POST /uploadfiles — stream the file to GCS and echo its path/name.
router.post("/uploadfiles", (req, res) => {
  upload(req, res, err => {
    if (err) {
      // FIX: was `sucess` — the client checks `response.data.success`,
      // so the typo made every failure indistinguishable from a
      // malformed payload.
      return res.json({ success: false, err })
    }
    return res.json({ success: true, filePath: res.req.file.path, fileName: res.req.file.filename })
  })
});
Back-end thumbnail upload
// POST /thumbnail — probe the uploaded video for its duration, then
// capture three screenshots and reply with the first thumbnail's path.
// FIX: the original ran ffprobe and the screenshot job concurrently, so
// the response could be sent before ffprobe filled in fileDuration (and
// a probe error went unhandled). Screenshots now start inside the
// ffprobe callback.
router.post("/thumbnail", (req, res) => {
  let thumbsFilePath = "";
  let fileDuration = "";
  ffmpeg.ffprobe(req.body.filePath, function (err, metadata) {
    if (err) {
      return res.status(400).json({ success: false, err });
    }
    console.dir(metadata);
    console.log(metadata.format.duration);
    fileDuration = metadata.format.duration;
    ffmpeg(req.body.filePath)
      .on('filenames', function (filenames) {
        console.log('Will generate ' + filenames.join(', '))
        thumbsFilePath = "thumbnails/" + filenames[0];
      })
      .on('end', function () {
        console.log('Screenshots taken');
        return res.json({ success: true, thumbsFilePath: thumbsFilePath, fileDuration: fileDuration })
      })
      // NOTE(review): these land on the local filesystem, not in GCS —
      // upload them to the bucket afterwards if needed.
      .screenshots({
        // Will take 3 screenshots
        count: 3,
        folder: '/thumbnails/',
        size: '320x240',
        //Names file w/o extension
        filename:'thumbnail-%b.png'
      });
  })
});
Front-end video upload
// Dropzone handler: upload the video, then request its thumbnail.
// FIX 1: axios reads `headers` (plural); the original `header` key was
// silently ignored.
// FIX 2: the outer promise chain had no .catch, so network failures
// became unhandled rejections.
const onDrop = (files) => {
  let formData = new FormData();
  const config = {
    headers: {'content-type': 'multipart/form-data'}
  }
  console.log(files)
  formData.append("file", files[0])
  axios.post('/api/video/uploadfiles', formData, config)
    .then(response => {
      if (response.data.success) {
        let variable = {
          filePath: response.data.filePath,
          fileName: response.data.fileName
        }
        setFilePath(response.data.filePath)
        //Thumbnail
        axios.post('/api/video/thumbnail', variable)
          .then(response => {
            if (response.data.success) {
              setDuration(response.data.fileDuration)
              setThumbnail(response.data.thumbsFilePath)
            } else {
              alert("Failed to generate a thumbnail");
            }
          })
      } else {
        alert('Failed to save video to the server')
      }
    })
    .catch(() => alert('Failed to save video to the server'))
}
Here you can find the sample code of an application web page prompting the user to supply a file to be stored in Cloud Storage. The code is configuring bucket using environment variables and creates a new blob in the bucket to upload the file data.
I hope this information helps.
You may have to just move them after they're generated with ffmpeg.
For example, I'm writing them to a temp directory outputted by ffmpeg, and then moving after to a Cloud Storage bucket in my cloud function:
const uploadResult = await bucket.upload(targetTempFilePath, {
destination: targetStorageFilePath,
gzip: true
});
Not sure which environment you're using (flex, cloud run, etc) but these were the instructions I was referencing, and are generally the same steps you'll want to follow: https://firebase.google.com/docs/storage/extend-with-functions

Upload a file to Google Cloud, in a specific directory

How to upload a file on Google Cloud, in a specific bucket directory (e.g. foo)?
"use strict";
// NOTE(review): the `gcloud` package is deprecated — prefer
// @google-cloud/storage in new code.
const gcloud = require("gcloud");
const PROJECT_ID = "<project-id>";
let storage = gcloud.storage({
projectId: PROJECT_ID,
keyFilename: 'auth.json'
});
let bucket = storage.bucket(`${PROJECT_ID}.appspot.com`)
// Uploads local 1.jpg to the bucket root, then logs a Firebase-storage
// style public URL for it.
bucket.upload("1.jpg", (err, file) => {
if (err) { return console.error(err); }
let publicUrl = `https://firebasestorage.googleapis.com/v0/b/${PROJECT_ID}.appspot.com/o/${file.metadata.name}?alt=media`;
console.log(publicUrl);
});
I tried:
bucket.file("foo/1.jpg").upload("1.jpg", ...)
But there's no upload method there.
How can I send 1.jpg in the foo directory?
In Firebase, on the client side, I do:
ref.child("foo").put(myFile);
// `destination` sets the object's full key, so the folder prefix places
// the file inside that "directory" of the bucket.
bucket.upload("1.jpg", { destination: "YOUR_FOLDER_NAME_HERE/1.jpg" }, (err, file) => {
//Do something...
});
Here is the documentation. By the way, gcloud is deprecated and you should use google-cloud instead.
UPDATE 2020
according to google documentation:
// FIX: the package scope is "@google-cloud" — the "#" in the original
// require path was a transcription error and would fail to resolve.
const { Storage } = require('@google-cloud/storage');
const storage = new Storage()
const bucket = storage.bucket('YOUR_GCLOUD_STORAGE_BUCKET')
// The object name IS its path: prefixing the folder places the file.
const blob = bucket.file('youFolder/' + 'youFileName.jpg')
const blobStream = blob.createWriteStream({
  resumable: false,
  gzip: true,
  public: true
})
blobStream.on('error', (err) => {
  console.log('Error blobStream: ',err)
});
blobStream.on('finish', () => {
  // The public URL can be used to directly access the file via HTTP.
  const publicUrl = ('https://storage.googleapis.com/'+ bucket.name + '/' + blob.name)
  res.status(200).send(publicUrl);
});
blobStream.end(req.file.buffer)//req.file is your original file
Here you go...
// Upload options: destination key ("folder" + name), resumable
// transfer, checksum validation, and custom object metadata.
const options = {
destination: 'folder/new-image.png',
resumable: true,
validation: 'crc32c',
metadata: {
metadata: {
event: 'Fall trip to the zoo'
}
}
};
bucket.upload('local-image.png', options, function(err, file) {
// Your bucket now contains:
// - "folder/new-image.png" (with the contents of `local-image.png`)
// `file` is an instance of a File object that refers to your new file.
});
If accessing from the same project projectId , keyFilename,.. not required,I use the below code for both upload and download , it works fine.
// Imports the Google Cloud client library.
// FIX: the package scope is "@google-cloud" (the "#" was a typo), and
// modern versions export the Storage class by name, so it must be
// destructured for `new Storage()` to work.
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();
// Local path the download is written to.
var destFilename = "./test";
var bucketName = 'cloudtesla';
// Name of the local file to upload / object to fetch.
var srcFilename = 'test';
const options = {
  destination: destFilename,
};
//upload file
console.log("upload Started");
storage.bucket(bucketName).upload(srcFilename, {}, (err, file) => {
  if(!err)
    console.log("upload Completed");
  else
    console.log(err);
});
//Download file
console.log("Download Started");
storage
  .bucket(bucketName)
  .file(srcFilename)
  .download(options)
  .then(() => {
    console.log("Download Completed");
  })
  .catch(err => {
    console.error('ERROR:', err);
  });
To upload inside specific directory in .NET Core, use
var uploadResponse= await storageClient.UploadObjectAsync(bucketName, $"{foldername}/"+fileName, null, memoryStream);
This should upload your file 'fileName' inside folder 'foldername' in the bucket
I think just adding foo/ to the filename should work, like bucket.upload("foo/1.jpg", (err, file) ... In GCS, directories just a matter of having a '/' in the file name.
If you want to use async-await while uploading files into storage buckets the callbacks won't do the job, Here's how I did it.
// Uploads `newFilePath` — a variable from the enclosing scope
// (NOTE(review): confirm it is defined wherever this is pasted) — to the
// bucket, gzip-compressed, stored under `destPath`.
async function uploadFile() {
const destPath = 'PATH_TO_STORAGE/filename.extension';
await storage.bucket("PATH_TO_YOUR_BUCKET").upload(newFilePath, {
gzip: true,
destination: destPath,
});
}
Hope it helps someone!

Resources