I have an Express backend for adding products to a database. It is configured to take a product image along with a name, price, type, and color, and it has worked very well so far. Now I am trying to make it accept not just one image but up to four, and I've been running into issues. The initial code for the single image is as follows.
First, the config for Cloudinary:
const express = require("express");
const cloudinary = require("cloudinary").v2;
const { CloudinaryStorage } = require("multer-storage-cloudinary");
const multer = require("multer");
const verify = require("../routes/verifyToken");
const MenShoe = require("../models/MenShoe"); // adjust the path to wherever your model lives
const router = express.Router();
cloudinary.config({
cloud_name: process.env.CLOUD_NAME,
api_key: process.env.API_KEY,
api_secret: process.env.API_SECRET,
});
const storage = new CloudinaryStorage({
cloudinary: cloudinary,
params: {
folder: "Shoes",
format: async (req, file) => "jpg", // supports promises as well; the original body returned nothing, so no format was actually applied
public_id: (req, file) => {
console.log(
new Date().toISOString().replace(/:/g, "-") + file.originalname
);
return (
new Date().toISOString().replace(/:/g, "-") + file.originalname
);
},
},
});
const parser = multer({ storage: storage });
Now, the POST request to post the shoes (product):
router.post("/post/menshoe", verify,parser.single("shoeImage"), async (req, res) => {
// console.log(req.file);
if (!req.file) return res.send("Please upload a file");
// console.log(req.file); // to see what is returned to you
console.log(req.file)
const shoeUpload = new MenShoe({
shoeImage: req.file.path,
name: req.body.name,
type: req.body.type,
price: req.body.price,
color: req.body.color,
});
console.log(shoeUpload);
try {
const shoe = await shoeUpload.save();
res.json({ msg: "Shoe uploaded", success: true, shoe });
} catch (err) {
console.log(err);
res.json({
msg: "Failed to upload",
success: false,
err,
});
}
}
);
I would like to point out that I have researched this, but every answer I've come across posts images in a completely different way, and I am seriously trying to avoid rewriting this from scratch, since I have written a lot of code exactly like this. I would really appreciate it if anyone could help me achieve this with just a few tweaks to the code above.
Thanks in advance
In your model directory:
const shoeSchema = new mongoose.Schema({
// other properties here
shoeImage: [{
type: String,
required: true // it could be optional
}],
});
module.exports = Shoe = mongoose.model('product', shoeSchema); // imported as MenShoe in the route below
Inside your post route,
router.post("/post/menshoe", verify,parser.array("shoeImage", 4), async
(req, res) => {
const { name, type, price, color } = req.body;
try {
let shoeUpload = new MenShoe({
name,
type,
price,
color
});
if (req.files) { // if you are adding multiple files at a go
const imageURIs = []; // array to hold the image urls
const files = req.files; // array of images
for (const file of files) {
const { path } = file;
imageURIs.push(path);
};
shoeUpload['shoeImage'] = imageURIs; // add the urls to object
await shoeUpload.save();
return res.status(201).json({ shoeUpload });
}
if (req.file && req.file.path) {// if only one image uploaded
shoeUpload['shoeImage'] = req.file.path; // add the single
await shoeUpload.save();
return res.status(201).json({ shoeUpload });
};
// you could save here without the image
...
return res.status(400).json({ // in case things don't work out
msg: 'Please upload an image'
});
} catch (error) {
console.error("Server error occurred:", error.message); // only in dev
return res.status(500).send("Server Error Occurred");
}
});
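For reference, here is a minimal sketch of how a client could send up to four files under that field; it assumes the shoeImage field name passed to parser.array above, a browser FormData, and placeholder product values:

async function uploadShoe(fileInput, token) {
  // fileInput is an <input type="file" multiple> element; token is whatever your verify middleware expects
  const formData = new FormData();
  for (const file of fileInput.files) {
    formData.append("shoeImage", file); // must match the field name given to parser.array
  }
  formData.append("name", "Runner");   // placeholder product fields
  formData.append("type", "running");
  formData.append("price", "120");
  formData.append("color", "white");

  const res = await fetch("/post/menshoe", {
    method: "POST",
    headers: { "auth-token": token }, // header name assumed; use whatever verifyToken actually reads
    body: formData,                   // do not set Content-Type manually; the browser adds the multipart boundary
  });
  console.log(await res.json());
}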
Related
I would like to handle all errors from multer while uploading files to Cloudinary and create my own custom error messages. How would I do that?
This is the Cloudinary and multer configuration:
const storage = new CloudinaryStorage({
cloudinary: cloudinary,
params: {
folder: "Products",
format: async (req, file) => "jpg", // return the desired format; the original body returned nothing
public_id: (req, file) => {
console.log(
new Date().toISOString().replace(/:/g, "-") + file.originalname
);
return (
new Date().toISOString().replace(/:/g, "-") + file.originalname
);
},
},
});
const parser = multer({ storage: storage });
And this is the route
router.post("/addProduct", middleware.IsMerchant, parser.array("Images", 3), async(req, res)=>{
const { ProductName, Description, Category, Price} = req.body
try {
let product = new Product({
ProductName,
Description,
Category,
Price
})
if (req.files) { // if you are adding multiple files at a go
const imageURIs = []; // array to hold the image urls
const files = req.files; // array of images
for (const file of files) {
const { path } = file;
imageURIs.push(path);
};
product['Images'] = imageURIs;
product.Owner.id = req.user._id
product.Owner.username = req.user.username
await product.save();
console.log("added new product")
req.flash("success", "You did it")
res.status(201).redirect("back");
}
} catch (error) {
console.log(error)
}
})
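One common way to get custom messages out of multer is to invoke the parser yourself inside a wrapper middleware and inspect the error it passes back. The sketch below reuses the parser, route path, field name, and limit from the question and maps multer.MulterError codes (and anything thrown by the Cloudinary storage engine) to custom responses; it is only a sketch of that approach, not code from the original thread:

const uploadImages = (req, res, next) => {
  const handler = parser.array("Images", 3);
  handler(req, res, (err) => {
    if (err instanceof multer.MulterError) {
      // errors raised by multer itself, e.g. LIMIT_FILE_SIZE or LIMIT_UNEXPECTED_FILE
      return res.status(400).json({ success: false, msg: `Upload error: ${err.code}` });
    }
    if (err) {
      // anything else, e.g. a failure in the Cloudinary storage engine
      return res.status(500).json({ success: false, msg: "Failed to upload image(s)" });
    }
    next();
  });
};

// use the wrapper in place of parser.array in the route
router.post("/addProduct", middleware.IsMerchant, uploadImages, async (req, res) => {
  // ...same handler body as above
});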
I am using multer to store book covers on my server, but the thing is that multer is middleware: it first saves the image on my server and then carries on with the later part of the code, which stores the book info in my database.
In case book creation fails (because of a wrong title or ISBN, or some other error), I am left with a copy of the uploaded image. So what should I do with it?
Here's a copy of my code which handles this part.
Router code to handle the request from the client:
const router = require('express').Router();
const multer = require('multer');
const createBook = require('../controller/createBook');
const { auth } = require('../middleware/authHandler');
const routePlan = require('../route_plan');
const renderFilePath = routePlan.createBook[2];
const redirectUrl = routePlan.createBook[0]
const upload = multer({
dest: 'public/bookCover/',
limits: {
fileSize: 10 * 1024 * 1024,
},
fileFilter(req, file, cb) {
  if (!file.originalname.match(/\.(png|jpg|jpeg)$/)) {
    return cb(new Error('Please upload an image.')); // return so cb is not called a second time below
  }
  cb(undefined, true);
}
});
router.use(auth);
router.get('/', async(req, res) => {
res.render(renderFilePath, { post_to: redirectUrl });
});
router.post('/', upload.single('bookCover'), createBook);
module.exports = router;
Controller code to create the book:
const { Book, JoiValidBook } = require('../models/book');
const { User } = require('../models/user');
//create Book after validation returns true.
async function createBook(req, res){
const validBook = req.validatedBook;
let book = new Book(validBook);
book = await book.save();
await User.findOneAndUpdate(
{ _id: req.user._id },
{ $push: { book_id: book._id } });
res.send({ msg: "Congrats your book has been published on our website!!! " });
}
// ISBN validation
function checkISBN(isbn){
//code for checking isbn return true for valid else false
}
// takes the posted data and shapes it into a readable format,
// then validates/sanitizes it against the schema;
// if an error arises or the book already exists, a message is passed on,
// else the book creation process is executed
module.exports = async function(req, res){
let book = {
img: req.file?.path,
title: req.body.title,
isbn: req.body.isbn,
author_name: req.user.username,
summary: "Summary need to be updated plz check after sometime",
pub_date: new Date()
};
const { error } = JoiValidBook.validate(book); // Joi returns { error, value }, not { err }
if (error) {
res.status(406);
return res.render('error', { message: error.details[0].message });
} else {
const ExistBook = await Book.findOne({
$or: [
{ title: book.title },
{ isbn: book.isbn }
]
});
if (ExistBook) return res.status(400).render('error', {
message: "It Seems book with same title or isbn already exists"
});
if(!checkISBN(book.isbn)) return res.render('error', {
message: "Plz Enter valid isbn number!!!"
});
req.validatedBook = book;
await createBook(req, res);
}
};
Hi, if something fails at the time of uploading the file, you can add a check: if it failed, look for the file at its path and delete it. You can put the delete-file code in the if (err) section.
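A minimal sketch of what that could look like in the controller above, assuming the uploaded cover sits at req.file.path (which is where multer's dest option puts it); the helper name is just for illustration:

const fs = require('fs');

// hypothetical helper: remove the uploaded cover when book creation cannot proceed
async function removeUploadedCover(req) {
  if (req.file && req.file.path) {
    try {
      await fs.promises.unlink(req.file.path);
    } catch (unlinkErr) {
      console.error('Failed to clean up uploaded cover:', unlinkErr.message);
    }
  }
}

// inside the validation branch of the controller:
if (error) {
  await removeUploadedCover(req); // validation failed, so discard the saved image
  res.status(406);
  return res.render('error', { message: error.details[0].message });
}

The same call would go in the duplicate-book and invalid-ISBN branches before their early returns.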
I want to delete the old image from the images folder when I update a product in Node.js. The product updates, but the old image is not deleted from the folder.
index.js
app.use(bodyParser.json({extended:true}))
app.use(bodyParser.urlencoded({extended:true}))
app.use(cors())
app.use('/', route)
app.use('/images', express.static('images'))
function
export const updateProduct = async (req, res)=>
{
try{
let image
const oldProduct = await Product.findOne({_id:req.params.id})
const {name,price,quantity,category} = req.body
if(req.file)
{
image = req.file.filename
const oldImageUrl= `/images/${oldProduct.image}`
// this is url of the old image http://localhost:2001/images/1629969633380_r.png
await fs.unlinkSync(oldImageUrl)
}else{
image = oldProduct.image
}
const productToUpdate = new Product({name,category,quantity,price,image})
await Product.updateOne({_id:req.params.id},productToUpdate)
res.status(200).json('product Updated')
}catch(error)
{
res.status(404).json({message:error.message})
}
}
Based on this bit of code:
app.use('/images', express.static('images'))
You should try to delete the image relative to the app folder.
const oldImageUrl= `images/${oldProduct.image}`
Or even better yet, use the path module.
const { join } = require('path');
...
const oldImageUrl = join(__dirname, 'images', oldProduct.image);
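The handler in the question also awaits fs.unlinkSync, which is synchronous; a small sketch of the same branch using Node's promise-based fs API instead (assuming fs/promises is available in your Node version):

const { unlink } = require('fs/promises');
const { join } = require('path');

if (req.file) {
  image = req.file.filename;
  // assumed location: adjust 'images' to where the folder actually lives relative to this file
  const oldImagePath = join(__dirname, 'images', oldProduct.image);
  await unlink(oldImagePath); // rejects if the file is missing, so the catch block will report it
} else {
  image = oldProduct.image;
}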
router.post('/update/:id', upload.single("file"), async (req, res) => {
let data = {
name: req.body.name,
price: req.body.price,
quantity: req.body.quantity,
discount: req.body.discount,
discription: req.body.discription,
file: req.file.filename
}
const oldProduct = await products.findOne({ _id: req.params.id });
const result = await products.findByIdAndUpdate(req.params.id, data,);
fs.unlink('./public/image/' + oldProduct.file, function (err) {
if (err && err.code == 'ENOENT') {
// file doesn't exist
console.info("File doesn't exist, won't remove it.");
} else if (err) {
// other errors, e.g. maybe we don't have enough permission
console.error("Error occurred while trying to remove file");
} else {
console.info(`removed`);
}
});
res.redirect('/listProducts');
})
Hi, I need to upload multiple images at a time to S3.
Currently I am using express-fileupload to upload a single image to AWS, and I want to use the same approach to upload multiple files to S3 and update the images array with the URLs in MongoDB.
My schema property:
const ServiceSchema = new mongoose.Schema(
{
photo: [
{
type: String,
default: 'no-photo.jpg',
},
],
});
module.exports = mongoose.model('Service', ServiceSchema);
My Controller:
// #desc Upload photo for service
// #route PUT /api/v1/services/:id/photo
// #access Private
exports.servicePhotoUpload = asyncHandler(async (req, res, next) => {
const service = await Service.findById(req.params.id);
if (!service) {
return next(new ErrorResponse(`Service not found with id of ${req.params.id}`, 404));
}
// Make sure user adding service is business owner
if (service.user.toString() !== req.user.id && req.user.role !== 'admin') {
return next(
new ErrorResponse(
`User ${req.user.id} is not authorized to update this service to business ${service._id}`,
401
)
);
}
// File Upload validation
if (!req.files) {
return next(new ErrorResponse(`Please upload a file.`, 400));
}
const file = req.files.file;
// Make sure it is a valid image file
if (!file.mimetype.startsWith('image')) {
return next(new ErrorResponse(`Please upload a valid image file.`, 400));
}
//Check File Size
if (file.size > process.env.MAX_FILE_UPLOAD) {
return next(
new ErrorResponse(
`Please upload an image less then ${process.env.MAX_FILE_UPLOAD / 1024}KB in size.`,
400
)
);
}
// Create custom filename
file.name = `service-uploads/servicePhoto_${service._id}${path.parse(file.name).ext}`;
uploadToS3({
fileData: req.files.file.data,
fileName: file.name,
})
.then(async (result) => {
console.log('Success Result: ', result);
await Service.findByIdAndUpdate(service._id, { photo: result.Location });
return res
.status(200)
.json({ success: true, message: 'Service photo added successfully', url: result.Location });
})
.catch((err) => {
console.log(err);
return next(new ErrorResponse('Failed to upload file to S3', 500));
});
});
My Utility File to upload File to S3:
const AWS = require('aws-sdk');
const uploadToS3 = (options) => {
// Set the AWS Configuration
AWS.config.update({
accessKeyId: process.env.AWS_S3_ACCESS_KEY,
secretAccessKey: process.env.AWS_S3_SECRET_KEY,
region: 'us-east-2',
});
// Create S3 service object
const s3 = new AWS.S3({ apiVersion: '2006-03-01' });
// Setting up S3 upload parameters
const params = {
Bucket: 'toolbox-uploads',
Key: options.fileName, // File name you want to save as in S3
Body: options.fileData, //
};
// Return S3 uploading function as a promise so return url can be handled properly
return s3.upload(params).promise();
};
module.exports = uploadToS3;
My Router:
const express = require('express');
const {
servicePhotoUpload
} = require('../controllers/service');
const Service = require('../models/Service');
const router = express.Router();
router.route('/:id/photo').put(protect, authorize('publisher', 'business', 'admin'), servicePhotoUpload);
module.exports = router;
The above code is working 100%.
I am a bit confused, as there were different approaches on Google and Stack Overflow, but none of them worked for me and none of them gets the returned URL and saves it into the database.
I want to make a separate utility file that uploads multiple files to S3, the same as I did for single files, so I can use it anywhere. That file should return the uploaded URLs so I can update my database.
I have tried multer-s3, but no solution has worked for me.
This approach might be different for you but that is how I was able to resolve the same issue.
First you'll need
Multer
multer-s3
aws-sdk
I made a FileUpload class that handles both single and multi-upload (I also needed to be able to upload PDF and video files). This is the code in my constructor; note that I also specified the S3 bucket in question from AWS.
this.s3 = new AWS.S3({
accessKeyId: process.env.S3_ACCESS_KEY_ID,
secretAccessKey: process.env.S3_SECRET_KEY,
Bucket: 'name_of_s3_bucket',
});
I created a method called upload in the class. Code below
upload(path, type) {
let ext = 'jpeg';
const multerFilter = (req, file, cb) => {
if (type === 'image') {
if (file.mimetype.startsWith(this.type)) {
cb(null, true);
} else {
cb(
new AppError(
'Not an Image! Please upload only images',
400
),
false
);
}
} else if (type === 'pdf') {
ext = 'pdf';
const isPdf = file.mimetype.split('/')[1];
if (isPdf.startsWith(this.type)) {
cb(null, true);
} else {
cb(
new AppError('Not a pdf! Please upload only pdf', 400),
false
);
}
}
};
const upload = multer({
storage: multers3({
acl: 'public-read',
s3: this.s3,
bucket: 'name_of_s3_bucket',
metadata: function (req, file, cb) {
cb(null, { fieldName: file.fieldname });
},
key: function (req, file, cb) {
let filename = `user-${
req.user.id
}/${path}/${uuid.v4()}-${Date.now()}.${ext}`;
// eslint-disable-next-line camelcase
const paths_with_sub_folders = [
'auditions',
'biography',
'movies',
];
if (paths_with_sub_folders.includes(path)) {
filename = `user-${req.user.id}/${path}/${
req.params.id
}/${uuid.v4()}-${Date.now()}.${ext}`;
}
cb(null, filename);
},
}),
fileFilter: multerFilter,
limits: {
fileSize: 5000000,
},
});
return upload;
}
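Since this is an excerpt from a class, the surrounding file is assumed to look roughly like the sketch below; the package names are inferred from the identifiers used above (multer, multer-s3, uuid, aws-sdk), the AppError path is hypothetical, and this.type is inferred from the filter's use of it:

const AWS = require('aws-sdk');
const multer = require('multer');
const multers3 = require('multer-s3');
const uuid = require('uuid');
const AppError = require('../utils/appError'); // hypothetical path to the error helper used in the filter

class FileUpload {
  constructor(type) {
    this.type = type; // inferred: the multerFilter above reads this.type
    this.s3 = new AWS.S3({
      accessKeyId: process.env.S3_ACCESS_KEY_ID,
      secretAccessKey: process.env.S3_SECRET_KEY,
      Bucket: 'name_of_s3_bucket',
    });
  }

  upload(path, type) {
    // ...the method shown above
  }
}

module.exports = FileUpload;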
To consume the above, I import the class into any controller where I need an upload feature and call the following.
Side note: ignore the paths code (it was just a way to generate unique file names for the files).
const upload = new FileUpload('image').upload('profile-images', 'image');
exports.uploadUserPhoto = upload.array('photos', 10);
I then used uploadUserPhoto as middleware before calling the following:
exports.addToDB = catchAsync(async (req, res, next) => {
if (!req.files) return next();
req.body.photos = [];
await Promise.all(
  req.files.map(async (file) => {
    req.body.photos.push(file.key); // file.key is the object key multer-s3 stored the upload under
  })
);
next();
});
As a high-level overview, this is the flow: first, upload your photos to S3 and get req.files; then loop through that req.files array, pushing the keys into an array field on your req object; finally, save them in your DB.
NOTE: You must promisify the req.files loop since the task is asynchronous.
My final router looked like this
router
.route('/:id')
.put(uploadUserPhoto, addToDB, updateProfile)
Item.js
Your model can have a field called images whose type is an array.
const mongoose = require("mongoose");
const ItemSchema = mongoose.Schema({
images: {
type: [],
},
});
module.exports = mongoose.model("Items", ItemSchema);
You map through the array of objects and extract only the data you want to store; in this example it is the key, which is the unique name given to every uploaded image.
route.js
router.post("/", verify, upload.array("image"), async (req, res) => {
const { files } = req;
const images = [];
files.map((file) => {
images.push(file.key);
});
try {
await new Item({
  images,
}).save();
res.status(200).send({ message: "saved images to db" });
}catch(err){
res.status(400).send({message: err})
}
});
Let me know if this does what you wanted
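For a variant that stays closer to the original setup in the question (express-fileupload plus the aws-sdk uploadToS3 utility), a multi-file helper could wrap the single-file upload in Promise.all and return the uploaded URLs. This is only a sketch; the file name, the key prefix parameter, and the assumption that you pass it an array of express-fileupload file objects are all illustrative:

// utils/uploadManyToS3.js (hypothetical file name)
const uploadToS3 = require('./uploadToS3'); // the single-file utility from the question; adjust the path

// files: array of express-fileupload file objects; prefix: S3 key prefix, e.g. 'service-uploads'
const uploadManyToS3 = async (files, prefix) => {
  const results = await Promise.all(
    files.map((file, i) =>
      uploadToS3({
        fileData: file.data,
        fileName: `${prefix}/${Date.now()}_${i}_${file.name}`,
      })
    )
  );
  return results.map((result) => result.Location); // array of uploaded S3 URLs
};

module.exports = uploadManyToS3;

In the controller, the returned array could then be saved in one go, for example with await Service.findByIdAndUpdate(service._id, { photo: urls }).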
I'm using the function below to get the files uploaded by multer from MongoDB. The request is returning an empty array.
exports.getPhotos = async (req, res) => {
const photos = await Photo.find()
.then(photos => {
res.status(200).json(photos);
})
.catch(err => res.status(500).json({message: "Something went wrong"}));
};
And this is the schema of the image. Is there any way to get the files without specifying the schema?
const mongoose = require("mongoose");
const {ObjectId} = mongoose.Schema;
const photoSchema = new mongoose.Schema({
length: {
type: String,
},
chunkSize: {
type: String,
required: true
},
uploadDate: {
type: Date,
},
filename: {
type: String,
},
md5: {
type: String,
},
contentType: {
type: String,
},
});
module.exports = mongoose.model("Photo", photoSchema);
I use GridFS so it can handle larger files too. A piece of sample code is below.
// requires assumed by this snippet
const mongoose = require('mongoose');
const crypto = require('crypto');
const path = require('path');
const multer = require('multer');
const GridFsStream = require('gridfs-stream');
const { GridFsStorage } = require('multer-gridfs-storage'); // older versions export the class directly

// Connecting to mongo
const conn = mongoose.createConnection(mongoURI); // mongoURI is your connection string, defined elsewhere
//Init gfs
let gfs;
conn.once('open', ()=>{
gfs = GridFsStream(conn.db, mongoose.mongo);
gfs.collection('uploads');
})
//Creating Storage engine
const storage = new GridFsStorage({
url:mongoURI,
file: (req, file) => {
return new Promise((resolve, reject)=>{
crypto.randomBytes(16,(err, buf)=>{
if(err){
return reject(err)
}
const fileName = buf.toString('hex') + path.extname(file.originalname)
//bucket name should match the collection name
const fileInfo = {
filename:fileName,
bucketName:'uploads'
}
resolve(fileInfo);
})
})
}
})
const upload = multer({storage})
Now use this upload const in your routes like the one below. There are a few upload methods, such as array, single, and so on, depending on the number of files you are uploading. The 'uploadedFile' argument is the name of the file input, and you should set it accordingly in your frontend.
app.post('/',upload.single('uploadedFile'),(req, res)=>{
res.json('file uploaded')
})
This upload middleware adds the file(s) to your request, which you can use to store the file names in your database and later fetch them by those unique names with a route like the one below.
app.get('/:filename', (req, res)=>{
gfs.files.findOne({filename:req.params.filename},(err,file)=>{
if(!file || file.length === 0){
return res.status(404).json({
err:'No file Exists'
})
}
const readStream = gfs.createReadStream({ filename: file.filename }); // gridfs-stream expects an options object
readStream.pipe(res)
})
})