Formidable couldn't parse large file - node.js

The code below works with small files, but it can't upload files larger than 50 KB. I think there is something I should be setting with maxFileSize. I'm using Uppy on the client. After adding a console.log on the server, I can see the request does arrive. I haven't found any clue on Stack Overflow and really need help.
const upload = (req, res) => {
  // formidable: to parse html form data
  const form = new formidable.IncomingForm({ multiples: true, maxFileSize: 10000 * 1024 * 1024 })
  const d = new Date();
  // I have a console.log here and everything seems fine
  form.parse(req, (err, fields, files) => {
    console.log('err', err)       // returns nothing
    console.log('files', files)   // returns nothing
    console.log('fields', fields) // returns nothing
    if (err) {
      console.log("Error parsing the files");
      console.log(err);
      return res.status(400).json({
        message: "There was an error parsing the files",
        status: "Fail",
        error: err
      })
    }
    for (let file in files) {
      try {
        if (files[file]) {
          let oldPath = files[file]['path']
          let rawData = fs.readFileSync(oldPath)
          const month = parseInt(d.getMonth() + 1) < 10 ? '0' + parseInt(d.getMonth() + 1) : parseInt(d.getMonth() + 1)
          let today = `${d.getFullYear()}_${month}_${d.getDate()}`
          let folderPath = __basedir + `\\media\\uploads\\storage\\${today}\\`;
          // folderPath = ..\dashboard-v2.0\server\\media\uploads\storage\\2021_06_18\\
          if (!fs.existsSync(folderPath)) {
            fs.mkdirSync(folderPath, {
              recursive: true
            });
          }
          // newPath = ..\dashboard-v2.0\server\\media\uploads\storage\\2021_06_18\\WIN.jpg
          let newPath = folderPath + files[file]['name']
          let databasePath = `storage/${today}/${files[file]['name']}`;
          let filename = files[file]['name'] // example_files.zip
          if (fs.existsSync(newPath)) {
            // if the file already exists, append Date.now()
            let time = Date.now()
            let filenameSplit = filename.split('.')
            filename = filenameSplit[0] + '_' + time + '.' + filenameSplit[1]
            // filename = WIN_1626750408096.jpg
            newPath = folderPath + filename
            databasePath = `storage/${today}/${filename}`;
          }
          fs.writeFile(newPath, rawData, async (err) => {
            if (err) {
              console.log(err);
              return res.status(400).send({ "err": err })
            }
            const userToken = jwt.verify(fields.user, config.TOKEN_SECRET)
            const newFiles = {
              filename: filename,
              user_id: ObjectId(userToken.id),
              filepath: databasePath,
              added_time: Date.now(),
            }
            const result = await db.collection("ate_files").insertOne(newFiles)
            console.log(`Created with the following id: ${result.insertedId}`)
            console.log(`Successfully uploaded ${newPath}`);
          })
        }
      } catch (err) {
        console.log(`Error: ${err}`);
        return res.status(409).send({ "error": `${err}` })
      }
    }
  })
  return res.status(200).send({ "message": "Successfully uploaded the files" })
}

Your return res.status(200).send({ "message": "Successfully uploaded the files" }) runs too soon; it should be inside the form.parse callback.
This is problematic for large files: while a big file is still being received, the client has already gotten a response, which can logically cut the HTTP connection.
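A minimal sketch of that restructuring (assuming the same formidable setup as above; the file-writing and database logic from the question would move inside the callback):

const upload = (req, res) => {
  const form = new formidable.IncomingForm({ multiples: true, maxFileSize: 10000 * 1024 * 1024 })
  form.parse(req, (err, fields, files) => {
    if (err) {
      // Parsing failed: respond once, here, and stop.
      return res.status(400).json({ message: "There was an error parsing the files", error: err })
    }
    // ... write the files and insert the database records here ...
    // Respond only after all of the work above has completed:
    return res.status(200).send({ "message": "Successfully uploaded the files" })
  })
  // Nothing is sent here any more, so the connection stays open until parsing finishes.
}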

Related

Lambda returns "Malformed Lambda proxy response"

Weirdest thing ever: I was trying out one of my endpoints in Lambda; everything was working a few days back and nothing had been changed.
Basically, the function runs fine up until the point where it needs to return a status code; then, for some reason, it just returns a 502, and API Gateway states "Malformed Lambda proxy response".
app.post("/api/v1/documents/create", async (req, res) => {
res.setHeader('Content-Type', 'application/json');
const filename = req.body.filename
const partner = req.body.partner
const payload = req.body
const uid = req.body.uid
console.log(payload)
try {
// Initial setup, create credentials instance.
const credentials = PDFServicesSdk.Credentials
.serviceAccountCredentialsBuilder()
.fromFile("./pdfservices-api-credentials.json")
.build();
// Setup input data for the document merge process.
const jsonString = payload,
jsonDataForMerge = jsonString;
// Create an ExecutionContext using credentials.
const executionContext = PDFServicesSdk.ExecutionContext.create(credentials);
// Create a new DocumentMerge options instance.
const documentMerge = PDFServicesSdk.DocumentMerge,
documentMergeOptions = documentMerge.options,
options = new documentMergeOptions.DocumentMergeOptions(jsonDataForMerge, documentMergeOptions.OutputFormat.PDF);
// Create a new operation instance using the options instance.
const documentMergeOperation = documentMerge.Operation.createNew(options);
// Set operation input document template from a source file.
const input = PDFServicesSdk.FileRef.createFromLocalFile('./darbo_sutartis.docx');
documentMergeOperation.setInput(input);
// Execute the operation and Save the result to the specified location.
documentMergeOperation.execute(executionContext)
.then(result => {
console.log("saving File to TMP?")
result.saveAsFile('/tmp/' + uid + '_' + partner + '.pdf')
const checkTime = 1000;
const timerId = setInterval(() => {
const isExists = fs.existsSync('/tmp/' + uid + '_' + partner + '.pdf', 'utf8')
if (isExists) {
console.log("\nFile written -> creating AWS Bucket")
const params1 = {
Bucket: "darbo-manija-docs",
Key: "employment_documents/" + uid + "/" + partner + "/",
};
s3.putObject(params1, (err, data) => {
if (err) {
console.log(err)
} else {
console.log(data)
}
});
console.log("\nAWS Bucket directory created...")
// do something here
console.log("\nUplaoding file to AWS\n")
fs.readFile('/tmp/' + uid + '_' + partner + '.pdf', function (err, data) {
if (err) throw err;
const pdf = data.toString('base64'); //PDF WORKS
const pdfNew = Buffer.from(pdf, 'base64')
const params = {
Bucket: 'darbo-manija-docs/employment_documents/' + uid + "/" + partner,
Key: uid + '_' + partner + '.pdf', // File name you want to save as in S3
Body: pdfNew, // <---------
ContentType: 'application/pdf'
};
// Uploading files to the bucket
s3.upload(params, function (err, data) {
if (err) {
res.status(400).send(JSON.stringify({
message: "ERR",
code: 0
}));
}
console.log(`\nFile uploaded successfully. ${data.Location}`);
console.log("\nCreating entry in Firebase")
var fb_ref = admin.database().ref('/documents');
fb_ref.push({
documentBucket: params.Bucket,
documentKey: params.Key,
candidate: partner,
employer: uid
})
.then(function (fb_ref) {
admin.database().ref('/documents').child(fb_ref.key).update({
documentID: fb_ref.key
})
});
console.log("\nFirebase entry created");
console.log("\nRemoving temp file...")
fs.unlinkSync('/tmp/' + uid + '_' + partner + '.pdf')
res.status(200).send(JSON.stringify({
result: pdf,
code: 100
}));
});
});
clearInterval(timerId)
}
}, checkTime)
})
.catch(err => {
if (err instanceof PDFServicesSdk.Error.ServiceApiError ||
err instanceof PDFServicesSdk.Error.ServiceUsageError) {
console.log('Exception encountered while executing operation', err);
res.status(400).send(JSON.stringify({
result: "Bad request",
code: 400
}));
} else {
console.log('Exception encountered while executing operation', err);
res.status(400).send(JSON.stringify({
result: "Bad request",
code: 401
}));
}
});
} catch (err) {
console.log('Exception encountered while executing operation', err);
}
});
No idea what is happening. I've read many posts regarding the same error, but none of them seem to have the same setup. Any suggestions? Thanks

Node.js Sendgrid attaching pdf files up to 7 MB crashes the server

I have a Node.js + Express application running on an EC2 instance. Part of that application is to send mass email (triggered by the admin) to all of its users, with the ability to attach files (max 5 files allowed).
Recently we tested it by attaching three PDF files of roughly 2.5 MB each. When the send button is pressed, the application keeps spinning before culminating in a 504 Gateway Timeout error. I suspect the SendGrid code is unable to process the attachments and the Node server crashes, taking the EC2 instance down with it. When this happens, the only way out for me is to stop the EC2 instance and then start it again. Rebooting does not help.
Here is the code
router.js
var fs = require('fs');
var multer = require('multer');
const uploadsDir = './uploads';

// SET STORAGE
var storage = multer.diskStorage({
  destination: function (req, file, cb) {
    if (!fs.existsSync(uploadsDir)) {
      fs.mkdirSync(uploadsDir);
    }
    cb(null, uploadsDir);
  },
  filename: function (req, file, cb) {
    cb(null, file.originalname);
  }
});
var upload = multer({ storage: storage });

router.post('/send', upload.array('files', 5), async (req, res) => {
  let subject = req.body.subject;
  let message = req.body.message;
  let result = message.replace(/(\r\n|\r|\n)/g, '<br>');
  let bccReceiverList = [];
  let whereCondition = {
    isActive: true
  };
  let attachments = [];
  if (req.files && req.files.length > 0) {
    req.files.forEach(file => {
      attachments.push({
        filename: file.originalname,
        type: file.mimetype,
        uploadPath: req.app.get('uploads')
      });
    });
  }
  let receiverUsers = await User.findAll({});
  // find to whom we are sending the email
  for (let index = 0; index < receiverUsers.length; index++) {
    const user = receiverUsers[index];
    emailHandler.sendEmail(
      {
        receiver: user.email,
        bccReceiver: bccReceiverList,
        templateId: process.env.EMAIL_BLAST_TEMPLATE,
        attachments: attachments.length > 0 ? attachments : []
      },
      {
        subject: subject,
        message: result
      },
      data => {}
    );
  }
  if (req.files && req.files.length > 0) {
    req.files.forEach(file => {
      fs.unlink(req.app.get('uploads') + '/' + file.originalname, function (err) {
        if (err) {
          console.error(err);
        }
        console.log('File has been Deleted');
        res.send('file was deleted');
      });
    });
  }
  res.redirect('back');
});
Then, in the actual email handler function:
var sg = require('@sendgrid/mail');
var fs = require('fs');
sg.setApiKey(process.env.SENDGRID_API_KEY);

exports.sendEmail = async function (email, payload, callback) {
  let msg = {
    to: email.receiver,
    from: { email: 'admin@myapp.com', name: 'My App' },
    subject: email.subject,
    templateId: email.templateId,
    dynamic_template_data: payload
  };
  // Buffer.from(fileContent).toString('base64')
  if (email.attachments != null && email.attachments.length > 0) {
    try {
      let attachmentObjects = [];
      for (let index = 0; index < email.attachments.length; index++) {
        const attachment = email.attachments[index];
        const fileContent = fs.readFileSync(attachment.uploadPath + '/' + attachment.filename);
        attachmentObjects.push({
          content: Buffer.from(fileContent).toString('base64'),
          filename: attachment.filename,
          type: attachment.mimetype,
          disposition: "attachment"
        });
      }
      msg.attachments = attachmentObjects;
    } catch (error) {
      console.log(error);
      callback({ status: 500, message: 'Error while attaching files to email: ' + error.message });
    }
  }
  if (email.hasOwnProperty('ccReceiver')) {
    if (email.ccReceiver != null) {
      msg.cc = email.ccReceiver;
    }
  }
  if (email.hasOwnProperty('bccReceiver')) {
    if (email.bccReceiver.length > 0) {
      msg.bcc = email.bccReceiver;
    }
  }
  sg.send(msg).then(() => {
    console.log('---- email sent successfully');
    // delete the attachment files from the uploads folder
    if (email.attachments != null && email.attachments.length > 0) {
      for (let index = 0; index < email.attachments.length; index++) {
        const attachment = email.attachments[index];
        fs.unlink(attachment.uploadPath + '/' + attachment.filename, function (err) {
          if (err) {
            console.error(err);
          }
          console.log('File has been Deleted');
        });
      }
    }
    callback({ status: 200, message: 'Email Sent Successfully' });
  }).catch(error => {
    // Log friendly error
    console.error('error-------- : ' + error.toString());
    // Extract error msg
    const {
      message,
      code,
      response
    } = error;
    // Extract response msg
    const {
      headers,
      body
    } = response;
    callback({ status: 500, message: 'Error while sending email: ' + error.message });
  });
};
I even tried attaching just one PDF file (2.5 MB) to the email and it still failed. When I perform the same test with smaller files, it works smoothly. I am not really sure how to solve this problem.

readFile synchronously nodejs

I am new to Node.js and just started learning. I need to read 5 JSON files and place them in an array. I have created 2 functions: readDirectory and processFile.
let transactionArray = [];

router.get('/', (req, res) => {
  // joining path of directory
  const directoryPath = path.join(__dirname, '../data');
  readDirectory(directoryPath);
  res.send(JSON.stringify(transactionArray))
})
readDirectory takes the directory path and reads the filenames in it.
function readDirectory(directoryPath) {
  // passing directoryPath and callback function
  fs.readdir(directoryPath, function (err, files) {
    // handling error
    if (err) {
      return console.log('Unable to scan directory: ' + err);
    }
    // listing all files using map
    let fileSummary = files.map(file => {
      // get the filename
      let categoryName = ''
      if (file.includes('category1')) {
        categoryName = 'category1'
      } else if (file.includes('category2')) {
        categoryName = 'category2'
      } else {
        categoryName = 'Others'
      }
      // read the file
      const filePath = directoryPath + '/' + file
      fs.readFile(filePath, 'utf8', (err, fileContents) => {
        if (err) {
          console.error(err)
          return
        }
        try {
          let data = JSON.parse(fileContents, categoryName)
          processFile(data, categoryName);
        } catch (err) {
          console.error(err)
        }
      })
    })
  });
}
Each file's parsed contents are then handled by the processFile function.
function processFile(data, categoryName) {
  let paymentSource = ''
  if (categoryName == 'category1') {
    paymentSource = categoryName + ': ' + categoryName + ' ' + data.currency_code
  } else if (categoryName == 'category2') {
    paymentSource = categoryName + ': ' + data.extra.payer + '-' + data.currency_code
  } else {
    paymentSource = 'Others'
  }
  let transactionDetails = new Transaction(
    data.id,
    data.description,
    categoryName,
    data.made_on,
    data.amount,
    data.currency_code,
    paymentSource)
  transactionArray.push(transactionDetails)
  console.log(transactionArray);
}
The console log looks something like this:
[{Transaction1}] [{Transaction1},{Transaction2}] [{Transaction1},{Transaction2},{Transaction3}]
but the result on the UI is only [].
While debugging, I noticed that the files are not read synchronously, so I tried using readFileSync, but it did not work. How can I run both functions in order so the route does not return an empty array?
Do some playing around to understand what the fs functions do when they have callbacks, and when they're synchronous. Starting from the code you have, we can make a few changes so that you don't have to use the synchronous functions from the file system library.
First of all, you need to wait for all the asynchronous tasks to complete before returning the response.
router.get('/', async (req, res) => {
  // joining path of directory
  const directoryPath = path.join(__dirname, '../data')
  readDirectory(directoryPath).then(() => {
    res.send(JSON.stringify(transactionArray))
  }).catch(err => {
    res.status(500).json(err)
  })
})
Secondly, to keep the code as-is and teach you something about promises, let's wrap the first function in a promise.
function readDirectory (directoryPath) {
  return new Promise((resolve, reject) => {
    // passing directoryPath and callback function
    fs.readdir(directoryPath, function (err, files) {
      // handling error: reject so the caller's catch sees it
      if (err) {
        console.log('Unable to scan directory: ' + err)
        return reject(err)
      }
      // listing all files using map
      Promise.all(
        files.map(file => {
          return new Promise((resolve, reject) => {
            // get the filename
            let categoryName = ''
            if (file.includes('category1')) {
              categoryName = 'category1'
            } else if (file.includes('category2')) {
              categoryName = 'category2'
            } else {
              categoryName = 'Others'
            }
            // read the file
            const filePath = directoryPath + '/' + file
            fs.readFile(filePath, 'utf8', (err, fileContents) => {
              if (err) {
                console.error(err)
                return reject(err)
              }
              try {
                const data = JSON.parse(fileContents, categoryName)
                // resolve this file's promise once it has been processed
                processFile(data, categoryName).then(() => resolve())
              } catch (err) {
                console.error(err)
                reject(err)
              }
            })
          })
        })
      ).then(() => {
        resolve()
      }).catch(err => {
        reject(err)
      })
    })
  })
}
Please refer to the bible (MDN) for JavaScript promises: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
And finally wrap the processFile function in a promise
function processFile (data, categoryName) {
  return new Promise((resolve, reject) => {
    let paymentSource = ''
    if (categoryName == 'category1') {
      paymentSource = categoryName + ': ' + categoryName + ' ' + data.currency_code
    } else if (categoryName == 'category2') {
      paymentSource = categoryName + ': ' + data.extra.payer + '-' + data.currency_code
    } else {
      paymentSource = 'Others'
    }
    const transactionDetails = new Transaction(
      data.id,
      data.description,
      categoryName,
      data.made_on,
      data.amount,
      data.currency_code,
      paymentSource)
    transactionArray.push(transactionDetails)
    console.log(transactionArray)
    resolve()
  })
}
What the heck am I doing? I'm just making your code execute its asynchronous tasks but wait for them to complete before moving on. Promises are a way to handle this. You could easily pull this off with the fs synchronous functions, but this way you get to learn about promises!
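As an alternative sketch, the same flow can be written with the built-in fs.promises API and async/await instead of hand-wrapped promises (this assumes the same router, path, transactionArray, and processFile from above):

const fsp = require('fs').promises

async function readDirectoryAsync (directoryPath) {
  // Read the list of filenames, then read and process each file in turn.
  const files = await fsp.readdir(directoryPath)
  for (const file of files) {
    let categoryName = 'Others'
    if (file.includes('category1')) categoryName = 'category1'
    else if (file.includes('category2')) categoryName = 'category2'
    const fileContents = await fsp.readFile(directoryPath + '/' + file, 'utf8')
    processFile(JSON.parse(fileContents), categoryName)
  }
}

router.get('/', async (req, res) => {
  try {
    await readDirectoryAsync(path.join(__dirname, '../data'))
    res.send(JSON.stringify(transactionArray))
  } catch (err) {
    res.status(500).json(err)
  }
})

Either version of processFile works here, since the push into transactionArray happens synchronously and await already guarantees the ordering.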

NodeJS itself keeps file EBUSY on Windows?

I created a simple function to process uploaded files. I'm using multer to process the multipart data into files. Then I use the code below to move the files around, and return data so my webpage knows how to display the images.
It seems that somehow Node.js keeps the files open itself. I also created a function to remove the files, but that gives me an EBUSY error. If I try to remove a file through Windows, it says that Node.js has the file locked. When I restart the Node.js process and then re-request the delete URL, the file is removed correctly.
Is there some way I can force Node.js to close the file resources? Or is there some other error in my script that I am missing?
I updated Node to version 12.4.0, but this didn't help either.
Processing the uploads:
exports.handleFormNotes = async (req, res, next) => {
  try {
    const configVariables = req.app.get('configVariables');
    const uploadSuffix = req.body.uploadFolderSuffix || '';
    console.log('upload suffix', uploadSuffix);
    if (!req.files.length) {
      return;
    }
    const uploadedFiles = Array();
    var destPath = configVariables['FormNotesUploadDirectory'];
    if (uploadSuffix !== '')
      destPath = destPath + '/' + uploadSuffix;
    destPath = path.resolve(destPath);
    // mkdirSync returns undefined, so run that first and see if the directory exists second.
    if (!fs.mkdirSync(destPath, { recursive: true }) && !fs.existsSync(destPath)) {
      console.log(destPath, 'does not exist!');
      // Dutch: "Path to store the image is not reachable"
      req.alertHandler.addAlert('Pad om afbeelding op te slaan is niet bereikbaar: ' + destPath, 'danger');
      res.render('error');
      return;
    }
    var baseUrlPath = configVariables['FormNotesUploadDocumentRoot'];
    if (uploadSuffix != null) {
      baseUrlPath = baseUrlPath + '/' + uploadSuffix;
    }
    for (const uploadedFile of req.files) {
      let now = new Date();
      let destFilename = getDateTime() + "_" + uploadedFile.originalname;
      let destFilenameThumb = 'thumb_' + destFilename;
      var fullDestination = path.resolve(destPath + '/' + destFilename);
      var fullDestinationThumb = path.resolve(destPath + '/' + destFilenameThumb);
      console.log('Copy src:', uploadedFile.path, fullDestination);
      fs.copyFileSync(uploadedFile.path, fullDestination);
      var unlinkResult = fs.unlinkSync(uploadedFile.path);
      console.log('Unlink "' + uploadedFile.path + '", result after upload:', unlinkResult);
      var newFileInfo = await sharp(destPath + '/' + destFilename)
        .resize({ width: 120 })
        .toFile(fullDestinationThumb);
      console.log('new file info thumb:', newFileInfo);
      uploadedFiles.push({
        'fullImg': baseUrlPath + '/' + destFilename,
        'thumbImg': baseUrlPath + '/' + destFilenameThumb,
        'original': uploadedFile.originalname
      });
    }
    // Push to backend
    const data = {
      files: [...uploadedFiles],
      uploadSuffix: uploadSuffix
    };
    // Normally retVal should be the return data from OI. If anything goes wrong, retVal = 'error'
    this.saveAttachment(req, res, data);
    return res.send(data);
  }
  catch (err) {
    console.log('Error handling form notes:', err);
    req.alertHandler.addAlert('Error handling form notes: ' + err);
    return 'error';
  }
}
Removing the uploads:
exports.rmFormNote = async (req, res, data) => {
  let retVal;
  try {
    const configVariables = req.app.get('configVariables');
    const httpPath = req.query.img;
    console.log('http path:', httpPath);
    // Strip off the document root, but check if they are the same first
    const firstPart = httpPath.substring(0, configVariables['FormNotesUploadDocumentRoot'].length);
    console.log('same?', firstPart, configVariables['FormNotesUploadDocumentRoot']);
    var relPath = httpPath;
    if (firstPart == configVariables['FormNotesUploadDocumentRoot']) {
      relPath = httpPath.substring(configVariables['FormNotesUploadDocumentRoot'].length + 1);
    }
    var parts = relPath.split('/');
    parts[parts.length - 1] = 'thumb_' + parts[parts.length - 1];
    var thumbPath = parts.join('/');
    thumbPath = path.resolve(configVariables['FormNotesUploadDirectory'] + '/' + thumbPath);
    console.log('thumbpath: ', thumbPath);
    var fullPath = configVariables['FormNotesUploadDirectory'] + '/' + relPath;
    var dest = path.resolve(fullPath);
    console.log('dest: ', dest);
    if (!fs.existsSync(dest))
      throw "File not found";
    fs.unlink(dest, (err) => {
      if (err) throw err;
      console.log('File deleted');
    });
    retVal = { result: true };
  }
  catch (err) {
    console.log('Ohnoo', err);
    retVal = { result: false, msg: err };
  }
  return res.send(retVal);
}
It turns out the thumbnail creator sharp was the problem, as stated in this GitHub issue.
I just had to disable the cache, like so:
sharp.cache(false);
var newFileInfo = await sharp(destPath + '/' + destFilename)
  .resize({ width: 120 })
  .toFile(fullDestinationThumb);
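Note that sharp.cache(false) disables sharp's cache for the whole process, so calling it once (e.g. at startup) should be enough; it does not need to be repeated before every resize.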

How to upload S3 files in KeystoneJS

I have an item called style which has 2 attributes: one holds raw CSS text and the other holds an S3File.
Style.add({
  ...
  css: { type: Types.Code, language: 'css' },
  cssFile: {
    type: Types.S3File,
    s3path: 'uploads/assets',
  },
  ...
});
I want to update the S3File with the contents of the css text.
function uploadCSStoAmazon(style) {
  // Store the css code in a temporary file (with an md5 name)
  var rndm = crypto.randomBytes(20).toString('hex'), file_path = '/tmp/css_temp_' + rndm + '.css';
  fs.writeFile(file_path, style.css, function(err) {
    if (err) {
      return console.log(err);
    }
    console.log("The file was saved!");
    // style.cssFile = new Types.S3File();
    // TODO upload file to amazon
    style.cssFile._.uploadFile(file_path, true, function(err, fileData) {
      // TODO erase css file
    });
  });
}

...

var aStyle = new Style.model({
  ...
  css: 'Some css string',
  ...
});

...

uploadCSStoAmazon(aStyle);
The cssFile attribute is undefined, I understand, but how could I create a new file, assign it to this attribute, and also upload the file?
I found out how: you can use the updateHandler that comes with Keystone. They're still using req.files from Express 3.x though.
// An Express-style file object generator
function writeToFile(fileName, txt, ext, callback) {
  var rndm = crypto.randomBytes(20).toString('hex'), file_path = '/tmp/css_temp_' + rndm + '.' + ext, the_file = {};
  fs.writeFile(file_path, txt, function(err) {
    if (err) {
      return callback(null, err);
    }
    var stats = fs.statSync(file_path);
    var fileSizeInBytes = stats["size"];
    the_file.path = file_path;
    the_file.name = fileName + '.' + ext;
    the_file.type = 'text/' + ext;
    the_file.size = fileSizeInBytes;
    console.log("The file was cached!");
    callback(the_file, err);
  });
}

...

/**
 * Update Style by ID
 */
exports.update = function(req, res) {
  var data = (req.method == 'POST') ? req.body : req.query;
  Style.model.findById(data._id).exec(function(err, item) {
    if (err) return res.apiError('database error', err);
    if (!item) return res.apiError('not found');
    writeToFile(item.slug, data.css, 'css', function(req_file, err) {
      if (err) return res.apiError('update error during file cache', err);
      req.files['cssFile_upload'] = req_file;
      item.getUpdateHandler(req).process(data, function(err) {
        if (err) return res.apiError('update error', err);
        res.apiResponse({
          success: true
        });
      }); // end process
    }); // end writeToFile
  });
};
