AWS Lambda function timing out after successful completion - node.js

I have a lambda function that does work successfully. I am able to create a file on S3 and another FTP server. The issue is even after successfull completion it time outs. It just doesn't finish executing. I could see from the logs it takes around 2 seconds and the timeout specified is 5 seconds. How do I make my function stops as soon as it is done? Here is the complete code:
"use strict";
var config = require('./config/dev');
var sql = require("mssql");
var AWS = require('aws-sdk');
var PromiseFtp = require('promise-ftp');
var fs = require('fs');
const request = require('request');
exports.handler = (event, context, callback) => {
GetDataFromServer(event, context, callback);
};
function GetDataFromServer(event, context, callback) {
console.log("Fetching data from database...");
var keys = [], outputText = '';
sql.connect(config.db, err => {
if (err) {
console.log("Error while connecting database :- " + err);
return callback(true, 'Error in fetching records from database...');
}
else {
new sql.Request()
.input('ClientId', sql.Int, 469)
.execute('ForesightDailyDataPull', (err, result) => {
if (!err) {
//Create Text here and assign to "outputText"
CreateFileOnS3(outputText, event, context, callback);
}
else {
console.log('Error in fetching records from database...');
return callback(true, 'Error in fetching records from database...');
}
})
}
});
sql.on('error', err => {
console.log('Error in fetching records from database...');
return callback(true, 'Error in fetching records from database...');
})
}
function CreateFileOnS3(fileData, event, context, callback) {
const fileName = generateFileName();
console.log('Sending file to S3...');
const s3 = new AWS.S3(config.awsCredentials);
const params = {
Bucket: config.app.s3Bucket,
Key: fileName,
Body: fileData
};
s3.upload(params, function (s3Err, data) {
if (s3Err) {
console.log('There was an error creating file on S3');
return callback(true, 'There was an error creating file on S3');
}
else {
console.log(`File uploaded successfully at ${data.Location}`);
CreatefileOnFTP(fileData, fileName, event, context, callback);
}
});
}
function CreatefileOnFTP(fileData, fileName, event, context, callback) {
console.log('Sending file to FTP...');
var ftpObject = {
"fileData": fileData,
"fileName": fileName,
"ftpURL": config.ftpDetails.ftpProtocol + "://" + config.ftpDetails.host,
"ftpUserName": config.ftpDetails.user,
"ftpPassword": config.ftpDetails.password
};
request({
url: config.ftpUploadURL,
method: "POST",
json: true,
body: ftpObject
}, function (error, response, body) {
if (!error) {
console.log('File sent successfully to FTP server.');
return callback(null, 'File sent successfully to FTP...');
}
else {
console.log('An error occurred while sending file to FTP.');
return callback(true, 'Error in sending file to FTP...');
}
});
}
function generateFileName() {
var _d = new Date(),
y = _d.getFullYear(),
m = _d.getMonth() + 1,
d = _d.getDate();
return y + '-' + (m < 10 ? '0' + m : m) + '-' + (d < 10 ? '0' + d : d) + '.txt';
}

You should close the open sql connection after the function completes by calling sql.close() after the action resolves.
function GetDataFromServer(event, context, callback) {
console.log("Fetching data from database...");
var keys = [], outputText = '';
sql.connect(config.db, err => {
if (err) {
console.log("Error while connecting database :- " + err);
return callback(true, 'Error in fetching records from database...');
}
else {
new sql.Request()
.input('ClientId', sql.Int, 469)
.execute('ForesightDailyDataPull', (err, result) => {
if (!err) {
//Create Text here and assign to "outputText"
CreateFileOnS3(outputText, event, context, callback);
sql.close() //HERE
}
else {
console.log('Error in fetching records from database...');
sql.close() //HERE
return callback(true, 'Error in fetching records from database...');
}
})
}
});
sql.on('error', err => {
console.log('Error in fetching records from database...');
sql.close() //HERE
return callback(true, 'Error in fetching records from database...');
})
}

Related

How to handle large file processing through AJAX and express

I'm uploading a PDF file using AJAX and express and sending the data to external REST API for some file processing.
Whenever I upload more than 3MB file I will get an error in AJAX before the REST API response.
I get correct results from the external REST API, but processing takes more time so AJAX is not receiving the success msg.
How do I handle this?
Tried with AXIOS and other promise handler but same result
module.exports = (req, res) =>
{
var pdf = [],
form = new formidable.IncomingForm(),
pdfFile,
dirname = process.cwd();
form.multiples = false;
// Upload directory for the images
form.uploadDir = path.join(dirname, 'tmp_uploads');
form.on('file', function(name, file) {
let token;
console.log('FORM ON')
if (req.cookies.userData.token) {
token = req.cookies.userData.token;
let buffer = null,
type = null,
filename = '',
renameFile = '';
// Read a chunk of the file.
buffer = readChunk.sync(file.path, 0, 262);
// filename = Date.now() + '-' + file.name;
filename = file.name;
renameFile = path.join(dirname, 'uploads/' + filename);
fs.rename(file.path, renameFile, function(err) {
if (err) throw err;
console.log('renamed complete');
pdfFile = path.join(dirname, 'uploads/' + filename);
let readFileStream = fs.createReadStream(pdfFile),
data = '',
formData = {};
formData = {
name: filename,
my_file: readFileStream,
Width: 1024,
Height: 768
};
function postData() {
// Setting URL and headers for request
var options = {
url: EXTERNALURL,
headers: {
"Authorization": 'bearer ' + token,
"content-type": "multipart/form-data"
},
formData: formData
};
// Return new promise
return new Promise(function(resolve, reject) {
// Do async job
request.post(options, function(err, resp, body) {
if (err) {
reject(err);
} else {
resolve(body);
}
})
})
}
function getData(url) {
// Return new promise
return new Promise(function(resolve, reject) {
// Do async job
request.get({ url: url, encoding: null }, function(err, resp, body) {
if (err) {
reject(err);
} else {
resolve(body);
}
})
})
}
var errHandler = function(err) {
console.log(err);
}
var filePath;
function main() {
var dataPromise = postData();
// Get user details after that get followers from URL
dataPromise.then(JSON.parse, errHandler)
.then(function(result) {
fData = result;
var fileName = fData.Id,
file_url = fData.PresentationZipUrl;
filePath = path.join(dirname, fData.HtmlPath);
// Do one more async operation here
var downloadPromise = getData(file_url).then();
return downloadPromise;
}, errHandler)
.then(function(data) {
//console.log(data);
var zip = new AdmZip(data);
if (!fs.existsSync(filePath)) {
fs.mkdirSync(filePath);
zip.extractAllTo(filePath, true);
}
}, errHandler)
.then(function(data) {
console.log('Done');
res.status(200).json({ 'status': "success" });
}, errHandler);
}
// console.log('before');
main();
}); // END RENAME FILE
} else { //
console.log('ERROR')
res.redirect('/');
}
});
form.on('error', function(err) {
console.log('Error occurred during processing - ' + err);
});
// Invoked when all the fields have been processed.
form.on('end', function() {
console.log('All the request fields have been processed.');
});
// Parse the incoming form fields.
form.parse(req, function(err, fields, files) {})}
AJAX:
$.ajax({
enter code here
url: '/presentation/uploadFiles',
method: 'post',
data: formData,
processData: false,
contentType: false,
timeout: 0}).done(function(d) {
console.log(d);
if (d.status == 'success') {
location.reload()
}}).fail(function(e, t) {
console.log(e);
console.log(t)})
.always(function() {
});

Lambda Function Error : EROFS: read-only file system, open './tmp/test.zip' Process exited before completing request

I have download a zip file from s3 bucket then extracting the zip file
and finally upload one file to s3 bucket in Lambda function using
Node JS.But am getting the error
==> Error: EROFS: read-only file system, open './tmp/test.zip'
"Process exited before completing> request"
exports.handler = function (callback) {
downloadZipFile(params, downloadPath, function (err) {
if (err) {
callback(err);
} else {
processZipFile(downloadPath, function (err) {
if (err) {
callback(err);
} else {
callback(null);
}
});
}
});
};
function downloadZipFile(params, downloadPath, callback) {
const file = fs.createWriteStream(downloadPath);
s3.getObject(params)
.on('httpData', function (chunk) {
file.write(chunk);
})
.on('success', function () {
callback(null);
})
.on('error', function (err) {
callback(err);
})
.on('complete', function () {
file.end();
})
.send();
}
function processZipFile(filePath) {
const stats = fs.statSync(filePath)
const fileSizeInBytes = stats.size
if (fileSizeInBytes > 0) {
var srcPath = filePath;
var destPath = "./tmp";
targz.decompress({
src: srcPath,
dest: destPath
}, function (err) {
if (err) {
console.log(err);
} else {
console.log("Done!");
UploadFile();
}
});
}
}
function UploadFile() {
var body = fs.createReadStream('./tmp/SampleFile.txt')
var srcfileKey = "SampleFile.txt";
// Upload the stream
var s3obj = new AWS.S3({ params: { Bucket: bucketName, Key: srcfileKey } });
s3obj.upload({ Body: body }, function (err, data) {
if (err) {
console.log("An error occurred", err);
}
console.log("Uploaded the file at", data.Location);
})
}
You need to change the file path to just /tmp instead of ./tmp. Lambda only allows you to write to the /tmp directory.

async.eachSeries runs only once with async.waterfall inside for each iteration

I am new to async library. I have used async.eachSeries and async.waterfall for each iteration. I see, the async.waterfall runs only once.
Here is my code :
var fs = require('fs'),
async = require('async'),
Client = require('node-rest-client').Client;
// REST API Call and output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.
async.eachSeries(jsonOutput.results, function(account, callback) {
var dataObject = {};
dataObject.updatetime = new Date();
var setAccountInfoURL = ""; // Data Update REST API Request
async.waterfall([
function setAccountInfo(updateCallback) {
// client.get(setAccountInfoURL, function (data, response) {
// var jsonOutput = JSON.parse(data.toString('utf8'));
updateCallback(null, "output", account)
// });
},
function saveAccountInfo(jsonOutput, account, updateCallback) {
var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
if(err) {
console.log(err);
}
console.log("JSON saved to " + "debuginfo.json");
updateCallback(null);
});
}
],function asyncComplete(err) {
if (err) {
console.warn('Error setting account info.', err);
}
console.log('async completed');
});
}, function(err){
if (err) {
console.log('error in loop');
}
console.log('loop completed');
});
Output:
124
JSON saved to debuginfo.json
async completed
Any help is really appreciated.
I found my mistake. I missed calling the callback after each iteration just after async is completed.
var fs = require('fs'),
async = require('async'),
Client = require('node-rest-client').Client;
// REST API Call and output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.
async.eachSeries(jsonOutput.results, function(account, callback) {
var dataObject = {};
dataObject.updatetime = new Date();
var setAccountInfoURL = ""; // Data Update REST API Request
async.waterfall([
function setAccountInfo(updateCallback) {
// client.get(setAccountInfoURL, function (data, response) {
// var jsonOutput = JSON.parse(data.toString('utf8'));
updateCallback(null, "output", account)
// });
},
function saveAccountInfo(jsonOutput, account, updateCallback) {
var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
if(err) {
console.log(err);
}
console.log("JSON saved to " + "debuginfo.json");
updateCallback(null);
});
}
],function asyncComplete(err) {
if (err) {
console.warn('Error setting account info.', err);
}
console.log('async completed');
callback(null); // this is the change.
});
}, function(err){
if (err) {
console.log('error in loop');
}
console.log('loop completed');
});

How to upload files to s3 synchronously using node.js api

I have the following piece of code:
array.forEach(function (item) {
// *** some processing on each item ***
var params = {Key: item.id, Body: item.body};
s3bucket.upload(params, function(err, data) {
if (err) {
console.log("Error uploading data. ", err);
} else {
console.log("Success uploading data");
}});
});
Because s3bucket.upload is being executed asynchronously - the loop finishes before uploading all the items.
How can I force s3bucket.upload to be synchronous?
Meaning don't jump to next iteration until this item was uploaded (or failed) to S3.
Thanks
you can use https://github.com/caolan/async#each each or eachSeries
function upload(array, next) {
async.eachSeries(array, function(item, cb) {
var params = {Key: item.id, Body: item.body};
s3bucket.upload(params, function(err, data) {
if (err) {
console.log("Error uploading data. ", err);
cb(err)
} else {
console.log("Success uploading data");
cb()
}
})
}, function(err) {
if (err) console.log('one of the uploads failed')
else console.log('all files uploaded')
next(err)
})
}
Better to use promises as suggested in one of the comments:
const uploadToS3 = async (items) => {
for (const item of array) {
const params = { Key: item.id, Body: item.body };
try {
const data = await s3bucket.upload(params).promise();
console.log("Success uploading data");
} catch (err) {
console.log("Error uploading data. ", err);
}
}
}
You could pass a post back function, this way the rest of the code is executed only when the upload has been completed. This does not answer your question but could be an alternative option:
array.forEach(function (item) {
// *** some processing on each item ***
var params = {Key: item.id, Body: item.body};
var f1=function(){
// stuff to do when upload is ok!
}
var f2=function(){
// stuff to do when upload fails
}
s3bucket.upload(params, function(err, data) {
if (err) {
f2();
console.log("Error uploading data. ", err);
// run my function
} else {
// run my function
f1();
console.log("Success uploading data");
}});
});

async.parallel not executing last cllback function when using with lambda functions

I am writing a lambda function in node js. I have main js file (index.js) which based on input event calls a methods in 2nd js file (loader.ls).
The issue is that after the method execution completes in loader.js, the callback in index.js is not getting executed. Below is the code
Code in index.js
var yates = require("./yatesLoader");
module.exports.handler = function(event, context) {
if(event.app === undefined || event.data === undefined){
context.fail("invalid request");
}
if(event.app === 'YatesDataLoader'){
var result = yates.sendNotification(event.data, function(err, resp){
if(err){
console.log("Error : " + JSON.stringify(err));
context.done(null, "error occured");
}
console.log("response : " + JSON.stringify(resp));
context.done(null, resp); // SUCCESS with message
});
}
};
Code in loader.js
var config = require("./config");
var optDynamo = config.DynamoDb;
var optSlack = config.Slack;
var async = require("async");
var req = require("request");
var AWS = require("aws-sdk");
AWS.config.update({
region: optDynamo.region
});
var DynamoDb = new AWS.DynamoDB();
var sendNotification = function(data, callback){
async.parallel([
sendSlackNotification.bind(null, data, cb),
saveDataToDynamoDb.bind(null, data, cb)
], function(err, results){
if (err) {
console.error("Error JSON:", JSON.stringify(err, null, 2));
return callback(err, null);
} else {
console.log("Success:", JSON.stringify(results, null, 2));
return callback(null, results);
}
});
};
var cb = function(err, resp){
if(err){
console.log("Error");
}else {
console.log("success");
}
};
var saveDataToDynamoDb = function(data, cb){
var params = {
"TableName" : optDynamo.table,
"Item" : {
"TimeStamp" : {"S" : new Date().toString() },
"ErrorMessage" : {"S" : data }
}
};
console.log("adding new data to DynamoDb");
DynamoDb.putItem(params, function(err, data){
if (err) {
console.error("Unable to add item. Error JSON:", JSON.stringify(err, null, 2));
return cb(err, null);
} else {
console.log("Added item:", JSON.stringify(data, null, 2));
return cb(null, data);
}
});
};
var sendSlackNotification = function(data, cb){
var options = {
method : 'post',
body : {"text" : data},
json : true,
url : optSlack.url
};
console.log("sending msg to slack");
req(options, function(err, resp){
if(err){
console.error("Unable to send message to Slack. Error JSON:", JSON.stringify(err, null, 2));
return cb(err, null);
} else {
console.log("Message sent to Slack:", JSON.stringify(resp, null, 2));
return cb(null, resp);
}
})
};
module.exports = {sendNotification : sendNotification};
Can someone help in understanding what is wrong here.
Its because of the way you are using .bind(). It is changing the arguments that get passed in to saveDataToDynamoDb() and sendSlackNotification(). So what they see is actually:
var saveDataToDynamoDb = function(data, cb, next) { ....
where next is the callback that the async library is expecting you to call. So you could call next after cb.
var saveDataToDynamoDb = function(data, cb, next){
...
DynamoDb.putItem(params, function(err, data){
if (err) {
cb(err, null);
return next(err, null);
} else {
cb(err, null);
return next(null, data);
}
});
};

Resources