Node.js SendGrid attaching PDF files up to 7 MB crashes the server - node.js

I have a Node.js + Express application running on an EC2 instance. Part of the application lets the admin send a mass email to all of its users, with the ability to attach files (max 5 files allowed).
Recently we tested it by attaching three PDF files of roughly 2.5 MB each. When the send button is pressed, the application keeps spinning before ending in a 504 Gateway Timeout error. My feeling is that the SendGrid code is unable to process the attachments and the Node server crashes, taking the EC2 instance down with it. When this happens, the only fix is to stop the EC2 instance and then start it again; rebooting does not help.
Here is the code
router.js
var fs = require('fs');
var multer = require('multer');

const uploadsDir = './uploads';

// SET STORAGE
var storage = multer.diskStorage({
  destination: function (req, file, cb) {
    if (!fs.existsSync(uploadsDir)) {
      fs.mkdirSync(uploadsDir);
    }
    cb(null, uploadsDir);
  },
  filename: function (req, file, cb) {
    cb(null, file.originalname);
  }
});

var upload = multer({ storage: storage });

router.post('/send', upload.array('files', 5), async (req, res) => {
  let subject = req.body.subject;
  let message = req.body.message;
  let result = message.replace(/(\r\n|\r|\n)/g, '<br>');
  let bccReceiverList = [];
  let whereCondition = {
    isActive: true
  };
  let attachments = [];
  if (req.files && req.files.length > 0) {
    req.files.forEach(file => {
      attachments.push({
        filename: file.originalname,
        type: file.mimetype,
        uploadPath: req.app.get('uploads')
      });
    });
  }
  let receiverUsers = await User.findAll({});
  // find to whom we are sending the email
  for (let index = 0; index < receiverUsers.length; index++) {
    const user = receiverUsers[index];
    // Note: sendEmail is called without await, so every recipient's message
    // (with its base64-encoded attachments) is built and in flight at once.
    emailHandler.sendEmail(
      {
        receiver: user.email,
        bccReceiver: bccReceiverList,
        templateId: process.env.EMAIL_BLAST_TEMPLATE,
        attachments: attachments.length > 0 ? attachments : []
      },
      {
        subject: subject,
        message: result
      },
      data => {}
    );
  }
  if (req.files && req.files.length > 0) {
    req.files.forEach(file => {
      // Note: the email handler also unlinks these files after each send,
      // and res.send() here followed by res.redirect() below writes the
      // response more than once.
      fs.unlink(req.app.get('uploads') + '/' + file.originalname, function (err) {
        if (err) {
          console.error(err);
        }
        console.log('File has been Deleted');
        res.send('file was deleted');
      });
    });
  }
  res.redirect('back');
});
Then, in the actual email handler function:
var sg = require('@sendgrid/mail');
var fs = require('fs');

sg.setApiKey(process.env.SENDGRID_API_KEY);

exports.sendEmail = async function (email, payload, callback) {
  let msg = {
    to: email.receiver,
    from: { email: 'admin@myapp.com', name: 'My App' },
    subject: email.subject,
    templateId: email.templateId,
    dynamic_template_data: payload
  };
  if (email.attachments != null && email.attachments.length > 0) {
    try {
      let attachmentObjects = [];
      for (let index = 0; index < email.attachments.length; index++) {
        const attachment = email.attachments[index];
        const fileContent = fs.readFileSync(attachment.uploadPath + '/' + attachment.filename);
        attachmentObjects.push({
          content: Buffer.from(fileContent).toString('base64'),
          filename: attachment.filename,
          // Note: the router stores this property as `type`, not `mimetype`,
          // so this is undefined as written.
          type: attachment.mimetype,
          disposition: "attachment"
        });
      }
      msg.attachments = attachmentObjects;
    } catch (error) {
      console.log(error);
      callback({ status: 500, message: 'Error while attaching files to email: ' + error.message });
    }
  }
  if (email.hasOwnProperty('ccReceiver')) {
    if (email.ccReceiver != null) {
      msg.cc = email.ccReceiver;
    }
  }
  if (email.hasOwnProperty('bccReceiver')) {
    if (email.bccReceiver.length > 0) {
      msg.bcc = email.bccReceiver;
    }
  }
  sg.send(msg).then(() => {
    console.log('---- email sent successfully');
    // delete the attachment files from the uploads folder
    if (email.attachments != null && email.attachments.length > 0) {
      for (let index = 0; index < email.attachments.length; index++) {
        const attachment = email.attachments[index];
        fs.unlink(attachment.uploadPath + '/' + attachment.filename, function (err) {
          if (err) {
            console.error(err);
          }
          console.log('File has been Deleted');
        });
      }
    }
    callback({ status: 200, message: 'Email Sent Successfully' });
  }).catch(error => {
    // Log friendly error
    console.error('error-------- : ' + error.toString());
    // Extract error msg
    const { message, code, response } = error;
    // Extract response msg
    const { headers, body } = response;
    callback({ status: 500, message: 'Error while sending email: ' + error.message });
  });
};
I even tried attaching just one PDF file (2.5 MB) and it still failed. When I run the same test with smaller files, it works smoothly. I am not really sure how to solve this problem.
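One thing to check when reproducing this: base64 encoding inflates each attachment by roughly a third, so three 2.5 MB PDFs become about 10 MB of payload per message, and because the loop above never awaits sendEmail, every recipient's message holds its own encoded copy of the attachments in memory at the same time. A minimal sketch of a throttled loop, assuming the crash is memory exhaustion from that unawaited fan-out (the Promise wrapper around the existing callback API is hypothetical):
for (const user of receiverUsers) {
  // Awaiting each send keeps only one base64-encoded copy
  // of the attachments in flight at a time.
  await new Promise(resolve => {
    emailHandler.sendEmail(
      {
        receiver: user.email,
        bccReceiver: bccReceiverList,
        templateId: process.env.EMAIL_BLAST_TEMPLATE,
        attachments: attachments
      },
      { subject: subject, message: result },
      () => resolve()
    );
  });
}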

Related

Lambda returns "Malformed Lambda proxy response"

Weirdest thing ever: I was trying out one of my endpoints in Lambda. Everything was working a few days back, and nothing was changed.
Basically the function runs fine up until the point where it needs to return a status code; for some reason it then just returns a 502, and API Gateway states "Malformed Lambda proxy response".
app.post("/api/v1/documents/create", async (req, res) => {
res.setHeader('Content-Type', 'application/json');
const filename = req.body.filename
const partner = req.body.partner
const payload = req.body
const uid = req.body.uid
console.log(payload)
try {
// Initial setup, create credentials instance.
const credentials = PDFServicesSdk.Credentials
.serviceAccountCredentialsBuilder()
.fromFile("./pdfservices-api-credentials.json")
.build();
// Setup input data for the document merge process.
const jsonString = payload,
jsonDataForMerge = jsonString;
// Create an ExecutionContext using credentials.
const executionContext = PDFServicesSdk.ExecutionContext.create(credentials);
// Create a new DocumentMerge options instance.
const documentMerge = PDFServicesSdk.DocumentMerge,
documentMergeOptions = documentMerge.options,
options = new documentMergeOptions.DocumentMergeOptions(jsonDataForMerge, documentMergeOptions.OutputFormat.PDF);
// Create a new operation instance using the options instance.
const documentMergeOperation = documentMerge.Operation.createNew(options);
// Set operation input document template from a source file.
const input = PDFServicesSdk.FileRef.createFromLocalFile('./darbo_sutartis.docx');
documentMergeOperation.setInput(input);
// Execute the operation and Save the result to the specified location.
documentMergeOperation.execute(executionContext)
.then(result => {
console.log("saving File to TMP?")
result.saveAsFile('/tmp/' + uid + '_' + partner + '.pdf')
const checkTime = 1000;
const timerId = setInterval(() => {
const isExists = fs.existsSync('/tmp/' + uid + '_' + partner + '.pdf', 'utf8')
if (isExists) {
console.log("\nFile written -> creating AWS Bucket")
const params1 = {
Bucket: "darbo-manija-docs",
Key: "employment_documents/" + uid + "/" + partner + "/",
};
s3.putObject(params1, (err, data) => {
if (err) {
console.log(err)
} else {
console.log(data)
}
});
console.log("\nAWS Bucket directory created...")
// do something here
console.log("\nUplaoding file to AWS\n")
fs.readFile('/tmp/' + uid + '_' + partner + '.pdf', function (err, data) {
if (err) throw err;
const pdf = data.toString('base64'); //PDF WORKS
const pdfNew = Buffer.from(pdf, 'base64')
const params = {
Bucket: 'darbo-manija-docs/employment_documents/' + uid + "/" + partner,
Key: uid + '_' + partner + '.pdf', // File name you want to save as in S3
Body: pdfNew, // <---------
ContentType: 'application/pdf'
};
// Uploading files to the bucket
s3.upload(params, function (err, data) {
if (err) {
res.status(400).send(JSON.stringify({
message: "ERR",
code: 0
}));
}
console.log(`\nFile uploaded successfully. ${data.Location}`);
console.log("\nCreating entry in Firebase")
var fb_ref = admin.database().ref('/documents');
fb_ref.push({
documentBucket: params.Bucket,
documentKey: params.Key,
candidate: partner,
employer: uid
})
.then(function (fb_ref) {
admin.database().ref('/documents').child(fb_ref.key).update({
documentID: fb_ref.key
})
});
console.log("\nFirebase entry created");
console.log("\nRemoving temp file...")
fs.unlinkSync('/tmp/' + uid + '_' + partner + '.pdf')
res.status(200).send(JSON.stringify({
result: pdf,
code: 100
}));
});
});
clearInterval(timerId)
}
}, checkTime)
})
.catch(err => {
if (err instanceof PDFServicesSdk.Error.ServiceApiError ||
err instanceof PDFServicesSdk.Error.ServiceUsageError) {
console.log('Exception encountered while executing operation', err);
res.status(400).send(JSON.stringify({
result: "Bad request",
code: 400
}));
} else {
console.log('Exception encountered while executing operation', err);
res.status(400).send(JSON.stringify({
result: "Bad request",
code: 401
}));
}
});
} catch (err) {
console.log('Exception encountered while executing operation', err);
}
});
No idea what is happening. I have read many posts regarding the same error, but none of them seem to have the same setup. Any suggestions? Thanks
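For reference, a 502 with "Malformed Lambda proxy response" means API Gateway's proxy integration did not get back an object in the shape it requires. That is also what happens when the invocation ends (or is frozen) before deferred callbacks, like the setInterval above, ever send a response. A minimal sketch of the shape a proxy-integrated handler must resolve with (the handler body is illustrative):
exports.handler = async (event) => {
  // Proxy integrations require statusCode (a number), optional headers,
  // and body as a *string*; anything else yields a 502.
  return {
    statusCode: 200,
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ code: 100 })
  };
};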

Get progress of firebase admin file upload

I'm trying to get the progress of a 1-minute video uploading to a Firebase storage bucket using the Admin SDK. I've seen a lot about using firebase.storage().ref.child....., but I'm unable to do that with the Admin SDK since it doesn't have the same functions. This is my file upload:
exports.uploadMedia = (req, res) => {
  const BusBoy = require('busboy');
  const path = require('path');
  const os = require('os');
  const fs = require('fs');
  const busboy = new BusBoy({ headers: req.headers, limits: { files: 1, fileSize: 200000000 } });
  let mediaFileName;
  let mediaToBeUploaded = {};
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    if (mimetype !== 'image/jpeg' && mimetype !== 'image/png' && mimetype !== 'video/quicktime' && mimetype !== 'video/mp4') {
      console.log(mimetype);
      return res.status(400).json({ error: 'Wrong file type submitted, only .png, .jpeg, .mov, and .mp4 files allowed' })
    }
    // my.image.png
    const imageExtension = filename.split('.')[filename.split('.').length - 1];
    // 43523451452345231234.png
    mediaFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
    const filepath = path.join(os.tmpdir(), mediaFileName);
    mediaToBeUploaded = { filepath, mimetype };
    file.pipe(fs.createWriteStream(filepath));
    file.on('limit', function () {
      fs.unlink(filepath, function () {
        return res.json({ 'Error': 'Max file size is 200 Mb, file size too large' });
      });
    });
  });
  busboy.on('finish', () => {
    admin
      .storage()
      .bucket()
      .upload(mediaToBeUploaded.filepath, {
        resumable: false,
        metadata: {
          metadata: {
            contentType: mediaToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        const meadiaUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${mediaFileName}?alt=media`;
        return res.json({ mediaUrl: meadiaUrl });
      })
      .catch((err) => {
        console.error(err);
        return res.json({ 'Error': 'Error uploading media' });
      });
  });
  req.pipe(busboy);
}
This works okay right now, but the only problem is that the user can't see how far along their 1- or 2-minute video upload is. Currently it's just an activity indicator, and the user sits there waiting without any feedback. I'm using React Native on the frontend, if that helps. Would appreciate any help!
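Since the Admin SDK has no equivalent of the client SDK's state_changed events, one approximate server-side option is to count bytes off the local read stream as it is piped into the bucket. This is a sketch only; uploadWithProgress is a hypothetical helper, and the fraction reflects bytes read locally rather than bytes acknowledged by the server:
const fs = require('fs');
// Hypothetical helper: report approximate progress while streaming a local
// file into the default bucket (`admin` is the initialized firebase-admin app).
function uploadWithProgress(localPath, destName, onProgress) {
  return new Promise((resolve, reject) => {
    const total = fs.statSync(localPath).size;
    let sent = 0;
    fs.createReadStream(localPath)
      .on('data', chunk => {
        sent += chunk.length;
        onProgress(sent / total); // fraction of the local file read so far
      })
      .pipe(admin.storage().bucket().file(destName).createWriteStream())
      .on('error', reject)
      .on('finish', () => resolve(destName));
  });
}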
I was able to implement this a lot more easily on the client side, and it works perfectly for image and video upload progress. On the backend I was using the Admin SDK, but on the frontend I was originally using the Firebase client SDK.
this.uploadingMedia = true;
const imageExtension = this.mediaFile.split('.')[this.mediaFile.split('.').length - 1];
const mediaFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
const response = await fetch(this.mediaFile);
const blob = await response.blob();
const storageRef = storage.ref(`${mediaFileName}`).put(blob);
storageRef.on(`state_changed`, snapshot => {
  this.uploadProgress = (snapshot.bytesTransferred / snapshot.totalBytes);
}, error => {
  this.error = error.message;
  this.submitting = false;
  this.uploadingMedia = false;
  return;
},
async () => {
  storageRef.snapshot.ref.getDownloadURL().then(async (url) => {
    imageUrl = [];
    videoUrl = [url];
    this.uploadingMedia = false;
    this.submitPost(imageUrl, videoUrl);
  });
});
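The same pattern, factored into a reusable Promise-based helper (assuming `firebaseApp` is an initialized Firebase app):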
export const uploadFile = (
  folderPath,
  fileName,
  file,
  generateDownloadURL = true,
  updateInformationUploadProgress
) => {
  return new Promise((resolve, reject) => {
    try {
      const storageRef = firebaseApp.storage().ref(`${folderPath}/${fileName}`)
      const uploadTask = storageRef.put(file)
      uploadTask.on(
        'state_changed',
        snapshot => {
          if (updateInformationUploadProgress) {
            const progress =
              (snapshot.bytesTransferred / snapshot.totalBytes) * 100
            updateInformationUploadProgress({
              name: fileName,
              progress: progress,
            })
          }
        },
        error => {
          console.log('upload error: ', error)
          reject(error)
        },
        () => {
          if (generateDownloadURL) {
            uploadTask.snapshot.ref
              .getDownloadURL()
              .then(url => {
                resolve(url)
              })
              .catch(error => {
                console.log('url error: ', error.message)
                reject(error)
              })
          } else {
            resolve(uploadTask.snapshot.metadata.fullPath)
          }
        }
      )
    } catch (error) {
      reject(error)
    }
  })
}
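Usage might look like this (the folder, file name, and progress callback are illustrative):
uploadFile('videos', 'clip.mp4', file, true, ({ name, progress }) => {
  console.log(`${name}: ${progress.toFixed(1)}%`); // e.g. "clip.mp4: 42.0%"
}).then(url => console.log('download URL:', url))
  .catch(err => console.error(err));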

Uploading images from iOS is too slow

My React Native iOS front-end cannot upload images to my Node.js (Express + Multer) back-end.
My front-end is React Native for Android and iOS. The Android version works fine with no issues; however, uploading images from an iOS device doesn't work most of the time.
Once the upload request is sent, I can see the image file being added over FTP, but very slowly, a few KB every second. An image of 500 KB may take 3 minutes or more before the request times out. The file is added to the server partially, and I can see its size change with each refresh.
Some iOS devices have no issues at all and upload quickly, but the vast majority run into this issue.
There are no connectivity issues: the same host and network work perfectly with Android, and with some iOS devices.
This is not limited to a specific iOS version or device. However, the devices that have the issue always have it, and those that don't never do.
How can I troubleshoot this?
POST request:
router.post('/image', (req, res) => {
  console.log('image')
  upload(req, res, (error) => {
    if (error) {
      console.log(error)
      return res.send(JSON.stringify({
        data: [],
        state: 400,
        message: 'Invalid file type. Only JPG, PNG or GIF file are allowed.'
      }));
    } else {
      if (req.file == undefined) {
        console.log('un')
        return res.send(JSON.stringify({
          data: [],
          state: 400,
          message: 'File size too large'
        }));
      } else {
        var CaseID = req.body._case; // || new mongoose.Types.ObjectId(); //for testing
        console.log(req.body._case + 'case')
        var fullPath = "uploads/images/" + req.file.filename;
        console.log(fullPath);
        var document = {
          _case: CaseID,
          path: fullPath
        }
        var image = new Image(document);
        image.save(function (error) {
          if (error) {
            console.log(error)
            return res.send(JSON.stringify({
              data: [],
              state: 400,
              message: 'bad request error'
            }));
          }
          return res.send(JSON.stringify({
            data: image,
            state: 200,
            message: 'success'
          }));
        });
      }
    }
  });
});
Upload.js:
const multer = require('multer');
const path = require('path');

// image upload module
const storageEngine = multer.diskStorage({
  destination: appRoot + '/uploads/images/',
  filename: function (req, file, fn) {
    fn(null, new Date().getTime().toString() + '-' + file.fieldname + path.extname(file.originalname));
  }
});

const upload = multer({
  storage: storageEngine,
  // limits: {
  //   fileSize: 1024 * 1024 * 15 // 15 MB
  // },
  fileFilter: function (req, file, callback) {
    validateFile(file, callback);
  }
}).single('image');

var validateFile = function (file, cb) {
  // allowedFileTypes = /jpeg|jpg|png|gif/;
  // const extension = allowedFileTypes.test(path.extname(file.originalname).toLowerCase());
  // const mimeType = allowedFileTypes.test(file.mimetype);
  // if (extension && mimeType) {
  //   return cb(null, true);
  // } else {
  //   cb("Invalid file type. Only JPEG, PNG and GIF file are allowed.")
  // }
  var type = file.mimetype;
  var typeArray = type.split("/");
  if (typeArray[0] == "image") {
    cb(null, true);
  } else {
    cb(null, false);
  }
};

module.exports = upload;
React Native Upload function:
pickImageHandler = () => {
  ImagePicker.showImagePicker(this.options1, res => {
    if (res.didCancel) {
    } else if (res.error) {
    } else {
      this.setState({ upLoadImage: true })
      var data = new FormData();
      data.append('image', {
        uri: res.uri,
        name: 'my_photo.jpg',
        type: 'image/jpg'
      })
      data.append('_case', this.state.caseID)
      fetch(url + '/image', {
        method: 'POST',
        body: data
      })
        .then((response) => response.json())
        .then((responseJson) => {
          this.setState(prevState => {
            return {
              images: prevState.images.concat({
                key: responseJson._id,
                src: res.uri
              })
            }
          })
          this.setState({ upLoadImage: false })
        })
        .catch((error) => {
          alert(error);
        });
    }
  })
}
Any suggestions?
Thanks
I saw your post on Upwork. Please try it this way; I'm using API Sauce for API calls:
export const addPartRedux = (data) => {
  return (dispatch, getState) => {
    console.log('addPartRedux', data);
    const values = {
      json_email: data.token.username,
      json_password: data.token.password,
      name: data.text ? data.text : '',
      car: data.selected.ID,
      model: data.selectedSub.ID,
      make_year: data.selectedYear,
      type: data.type,
      ImportCountry: data.import_image ? data.import_image : '',
      FormNumber: data.number ? data.number : '',
      do: 'insert'
    };
    const val = new FormData();
    Object.keys(values).map((key) =>
      val.append(key, values[key])
    );
    if (data.imageok) {
      val.append('image', {
        uri: data.image.uri,
        type: data.image.type,
        name: data.image.name
      });
    }
    dispatch(loading());
    api.setHeader('Content-Type', 'multipart/form-data;charset=UTF-8');
    api.post('/partRequest-edit-1.html?json=true&ajax_page=true&app=IOS',
      val,
      {
        onUploadProgress: (e) => {
          console.log(e);
          const prog = e.loaded / e.total;
          console.log(prog);
          dispatch(progress(prog));
        }
      })
      .then((r) => {
        console.log('Response from addPartRedux', r.data);
        if (r.ok === true) {
          const setting = qs.parse(r.data);
          dispatch(addpart(setting));
        } else {
          dispatch(resetLoading());
          dispatch(partstError('Error Loading '));
        }
      })
      .catch((e) => {
        console.log('submitting form Error ', e);
        dispatch(resetLoading());
        dispatch(partstError('Try Again'));
      });
  };
};

Video upload using formidable in Node.js, error: POST 404 / 502

I'm uploading a video file from local to the server, and then I'll be uploading it to a CDN.
The issue I'm facing is that my code runs well locally, but it does not work when I deploy it to the server.
Here is my code
commonJs
$("#uploadVideo").click(function (e) {
var reader = new FileReader();
var fileInput = document.getElementById('Videofile');
var previewUrl = window.URL.createObjectURL(fileInput.files[0]);
$(".video").attr("src", previewUrl);
var videotype = "video/mp4";
var file_data = $("#Videofile").prop("files")[0];
if (!file_data.type.match(videotype)) {
return "alert('Please upload mp4 files')"
} else {
var metadata = {
'content-type': 'video/mp4',
'size': file_data.size,
'uploaded': new Date(),
}
reader.onload = function (e) {
$("file_data").text("File Content: " + reader.result); // Show the file content
}
reader.readAsBinaryString(file_data);
file_data.onloadedmetadata = function () {
alert("Meta data for audio loaded");
};
};
var form_data = new FormData();
form_data.append("file", file_data)
form_data.append("metdata", metadata)
for (var key of form_data.entries()) {
console.log(key[0] + ', ' + key[1]);
}
if (form_data != undefined) {
$.ajax({
type: "post",
contentType: false,
processData: false,
url: "/api/recordvideo",
data: form_data,
dataType: 'json',
success: function (result) {
if (result) {
$(".video").attr("src", result.videolink);
alert("Successfully Uploaded Video");
console.log("Successfully Uploaded Video");
} else {
alert("Error on Uploading Video");
console.log("Error on Uploading Video");
}
},
error: function (err) {
console.log("error");
}
});
}
e.preventDefault();
e.stopPropagation();
});
ServerSide
app.post('/api/recordvideo', Api.recordvideo);

var Upload = require('gcs-resumable-upload');
ApiService.recordvideo = function (req, res) {
  var db = req.db;
  console.log("came in cloudupload");
  var form = new formidable.IncomingForm();
  var filesdata;
  form.keepExtensions = true;
  form.multiples = false;
  form.on('fileBegin', function (name, file) {
    file.path = 'public/demo/' + file.name;
    console.log("fileBegin: " + JSON.stringify(file));
  });
  form.on('file', function (name, file) {
    console.log('Uploaded ' + JSON.stringify(file));
    var path = file.path;
    console.log("came in cloud3 :" + JSON.stringify(path));
  });
  form.parse(req, function (err, fields, files) {
    console.log("came in cloud0" + JSON.stringify(files));
    filesdata = files;
  });
  console.log("came in cloud2");
  form.on('end', function (fields, files) {
    var userid = appconfig.ObjectID(appconfig.decrypt(req.signedCookies['gid']));
    var path = this.openedFiles[0].path;
    console.log("came in cloud3 :" + JSON.stringify(path));
    fs.createReadStream(path)
      .pipe(Upload.upload({ bucket: '******', file: path, metadata: { contentType: this.openedFiles[0].type } }))
      .on('finish', function (response) {
        console.log("Successfully Uploaded Video :" + JSON.stringify(response));
        res.send({ "status": false, "videolink": "https://****/****/" + filesdata.file.name });
      });
  });
  // res.send({ "status": false, "err": null });
}
At first it was at least uploading to the server folder, and in Chrome's developer tools it used to give a response: {readystate: 4, ...}.
Now, after I made some changes, it doesn't even hit my API; after a few seconds Chrome's developer tools show a 404 or 502 error.
Well, I got the solution. Previously I was using the gcs-resumable-upload module to upload; now I tried the @google-cloud/storage module, with which I was able to upload up to 9 MB.
const Storage = require('@google-cloud/storage');

var db = req.db;
console.log("came in cloudupload");
var form = new formidable.IncomingForm();
var filesdata;
form.keepExtensions = true;
form.multiples = false;
form.parse(req, function (err, fields, files) {
  filesdata = files;
});
form.on('end', function (fields, files) {
  var userid = appconfig.ObjectID(appconfig.decrypt(req.signedCookies['gid']));
  var path = this.openedFiles[0].path;
  const storage = new Storage({
    keyFilename: 'gcloudcred.json'
  });
  const myBucket = storage.bucket('onfvideo');
  myBucket.upload(path).then((resp) => {
    console.log('uploaded to' + resp);
    res.send({ "status": true, "err": null });
  }).catch(err => {
    console.error('ERROR:', err);
    res.send({ "status": false, "err": null });
  });
});
};
The 9 MB limitation I was facing was due to the .NET Framework request-size limit, which I was able to raise using:
<system.web>
  <customErrors mode="Off"/>
  <httpRuntime targetFramework="4.5" maxRequestLength="7483648" />
</system.web>
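Note that maxRequestLength is measured in kilobytes. If IIS sits in front of the app, the request-filtering limit (measured in bytes) may also need raising; a sketch, with an illustrative 100 MB value:
<system.webServer>
  <security>
    <requestFiltering>
      <!-- value is in bytes; 104857600 = 100 MB (illustrative) -->
      <requestLimits maxAllowedContentLength="104857600" />
    </requestFiltering>
  </security>
</system.webServer>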
Method 2: Using XHR to call the REST API directly.
1. Generate an access token using the google-auto-auth module.
2. Make the XMLHttpRequest:
var fileInput = $("#Videofile").prop("files")[0];
var url = "https://www.googleapis.com/upload/storage/v1/b/bucketname/o?uploadType=media&name=" + fileInput.name;
var http = new XMLHttpRequest();
http.open('POST', url, true);
http.setRequestHeader('Content-type', 'video/mp4');
http.setRequestHeader("Authorization", "Bearer " + token);
http.send(fileInput);
// Note: for upload progress, browsers fire events on http.upload.onprogress
// (attached before send()); http.onprogress tracks the response download.
http.onprogress = function (ev) {
  if (ev.lengthComputable) {
    var percentage = Math.round((ev.loaded / ev.total) * 100);
    console.log("percent " + percentage + '%');
  } else {
    console.log("Unable to compute progress information since the total size is unknown");
  }
}
http.onloadstart = function (ev) { console.log("start") }
http.onloadend = function (ev) { }
http.onreadystatechange = function () {
  if (http.readyState == 4 && http.status == 200) {
    var response = JSON.parse(http.responseText);
    alert("Successfully Uploaded Video");
  }
}

How to have multiple file names while uploading with multer

My code is as shown below:
const multer = require('multer');
const path = require('path');

let upload = multer();
let profile_image = '';

const storage = multer.diskStorage({
  destination(req, file, callback) {
    callback(null, './public/images')
  },
  filename(req, file, callback) {
    profile_image = `${file.fieldname}-${Date.now()}${path.extname(file.originalname)}`;
    callback(null, profile_image);
  }
});

const userData = (req, res) => {
  upload = multer({
    limits: {
      fileSize: 1000000,
      files: 2
    },
    storage,
    fileFilter(req, file, callback) {
      const ext = path.extname(file.originalname);
      if (ext !== '.png' && ext !== '.jpg' && ext !== '.gif' && ext !== '.jpeg') {
        return callback(res.end('Only images are allowed'), null)
      }
      callback(null, true);
    }
  }).any();
  upload(req, res, err => {
    const foodtruck_name = req.body.foodtruck_name;
    const foodtruck_tag = req.body.foodtruck_tag;
    console.log(`foodname${foodtruck_name}`);
    console.log("error" + err);
    console.log("profile image" + profile_image);
    if ((!foodtruck_name) || (foodtruck_name.trim() == '')) {
      console.log("fooddddname " + foodtruck_name);
      res.json({
        status: '404',
        message: 'Please enter valid foodtruck name'
      });
    } else {
      const truck = new foodTruck();
      truck.foodtruck_name = foodtruck_name,
      truck.foodtruck_tag = foodtruck_tag,
      awsUpload.fileUpload(profile_image).then((result) => {
        truck.foodtruck_img = "https://myWebsite.com/" + profile_image;
        awsUpload.fileUpload(profile_image).then((result) => {
          truck.foodtruck_logo = "https://myWebsite.com/" + profile_image;
          truck.save((err, trucSaved) => {
            res.json({
              status: '200',
              message: 'Thanks for registering with quflip',
              data: trucSaved
            });
          });
        }).catch((errMsg) => {
          res.json({
            status: '400',
            message: errMsg
          })
        });
      }).catch((errMsg) => {
        res.json({
          status: '400',
          message: errMsg
        })
      });
    }
  });
};
Here, I am able to upload multiple images successfully, but I am not able to get the names of each individual file while uploading. How can I access them inside the upload(req, res, err => { ... }) function?
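For what it's worth, a minimal sketch: with .any(), multer exposes the uploaded files as the req.files array inside that callback, and each entry carries its own generated filename and originalname (the shared profile_image variable only ever holds the last name written):
upload(req, res, err => {
  if (err) return res.json({ status: '400', message: String(err) });
  // req.files is an array when using .any(); each element has its own names
  req.files.forEach(file => {
    console.log(file.fieldname);    // form field the file was sent under
    console.log(file.originalname); // name of the file on the user's machine
    console.log(file.filename);     // name generated in the storage engine
    console.log(file.path);         // full path where it was saved
  });
});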
