AWS Lambda Node function timeout occasionally - node.js

I have an AWS Lambda function written in Node that resizes images when a new image is uploaded to my S3 bucket. The function works flawlessly 99% of the time. It's the 1% that I am wondering about...
On random occasions, the function execution times out at 30 seconds (the timeout we have set for the function). I don't think raising this timeout will save us anything here, because the function is just flat-out hanging.
Below are the logs from CloudWatch. Above the blue line is the failure; 1.5 minutes later (below the blue line) is when it retried and succeeded.
This is the code that is run in the function, upon uploading of an image to S3.
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
var path = require('path');
var MAX_WIDTH = 345;
var MAX_HEIGHT = 345;
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log(
'Reading options from event:\n',
util.inspect(event, { depth: 5 })
);
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(
event.Records[0].s3.object.key.replace(/\+/g, ' ')
);
var dstBucket = process.env.S3_BUCKET;
var dstKey = event.Records[0].s3.object.versionId + path.extname(srcKey);
console.log('Image version: ', dstKey);
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback('Source and destination buckets are the same.');
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback('Could not determine the image type.');
return;
}
var imageType = typeMatch[1];
if (imageType != 'jpg' && imageType != 'png') {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall(
[
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject(
{
Bucket: srcBucket,
Key: srcKey,
},
next
);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height).toBuffer(imageType, function(
err,
buffer
) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject(
{
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType,
},
next
);
},
],
function(err) {
if (err) {
console.error(
'Unable to resize ' +
srcBucket +
'/' +
srcKey +
' and upload to ' +
dstBucket +
'/' +
dstKey +
' due to an error: ' +
err
);
callback(err);
} else {
console.log(
'Successfully resized ' +
srcBucket +
'/' +
srcKey +
' and uploaded to ' +
dstBucket +
'/' +
dstKey
);
callback(null, 'Success');
}
}
);
};
Are there any settings or ways to avoid these random timeouts? Is this a "boot up" issue, where the previous run of this function was 5 hours earlier and it was sitting idle before executing again?
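Before raising the timeout, two things might be worth ruling out; this is a guess based on the code above, not a confirmed diagnosis. First, the Node runtime by default waits for the event loop to drain before completing an invocation, so a stray socket or timer can keep the function alive until it times out. Second, the gm size() callback above never forwards its error, so a gm failure would leave the waterfall stalled. A minimal sketch of both changes, reusing the names (gm, MAX_WIDTH, MAX_HEIGHT, imageType) from the handler above:
// Sketch only, not a confirmed fix.
// 1) At the top of the handler: return as soon as callback() fires instead of
//    waiting for the event loop to empty.
//      context.callbackWaitsForEmptyEventLoop = false;
// 2) Drop-in replacement for the transform step that forwards gm errors, so the
//    waterfall can never stall silently on a gm failure.
function transform(response, next) {
  gm(response.Body).size(function(err, size) {
    if (err) {
      return next(err); // previously ignored; a gm failure left the function hanging
    }
    var scalingFactor = Math.min(MAX_WIDTH / size.width, MAX_HEIGHT / size.height);
    this.resize(scalingFactor * size.width, scalingFactor * size.height)
      .toBuffer(imageType, function(err, buffer) {
        if (err) return next(err);
        next(null, response.ContentType, buffer);
      });
  });
}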

Related

AWS S3 Lambda thumbnail generation node error in cloud watch

My Lambda function is not generating a thumbnail as expected. Here is my Node code. I uploaded a .zip file with the node modules.
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
var MAX_WIDTH = 250;
var MAX_HEIGHT = 250;
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "thumb-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1].toLowerCase();
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
console.log('Width logggg' + size.width);
console.log('height logggg' + size.height)
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
}
);
};
I've just pasted the code from this link.
I set everything up correctly: the Lambda function, the S3 trigger, and the bucket names. But in CloudWatch I'm getting this error.
Please tell me what my mistake is and how to fix this error.
The stack trace shows that the error is happening in the file /var/task/index.js on line 57, column 38, so that's the best place to start.
If I had to guess, assuming the transform function is what's in index.js, it might be that size does not have a width property. It might be worth logging there to see what's going on.
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width, // <-- here
MAX_HEIGHT / size.height
);
I don't know what gm(response.Body).size() passes to its callback, but it would be good to log the parameters that get passed here.
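A sketch of that logging plus a guard, reusing the names from the question's code (an illustration, not a confirmed fix):
function transform(response, next) {
  gm(response.Body).size(function(err, size) {
    // Log both callback arguments before touching size.width (index.js line 57).
    console.log('gm size() err:', err);
    console.log('gm size() size:', JSON.stringify(size));
    if (err || !size || !size.width || !size.height) {
      // Fail the waterfall explicitly instead of reading .width off undefined.
      return next(err || new Error('Could not read image dimensions'));
    }
    var scalingFactor = Math.min(MAX_WIDTH / size.width, MAX_HEIGHT / size.height);
    this.resize(scalingFactor * size.width, scalingFactor * size.height)
      .toBuffer(imageType, function(err, buffer) {
        if (err) return next(err);
        next(null, response.ContentType, buffer);
      });
  });
}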

s3 Lambda convert command

I am trying to do image optimisation through the convert command. If I execute the convert command from the command line,
convert $1 -sampling-factor 4:2:0 -strip -quality 85 -interlace JPEG -colorspace RGB $2
the image converts well.
But if I execute the Lambda function, the image quality is poor. Is there any other way to get this solved? My Node.js function is:
var async = require('async');
var AWS = require('aws-sdk');
var util = require('util');
var im = require('imagemagick');
var fs = require('fs');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
var s3 = require('s3');
var client = s3.createClient({
maxAsyncS3: 20, // this is the default
s3RetryCount: 3, // this is the default
s3RetryDelay: 1000, // this is the default
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: "xx",
secretAccessKey: "xx",
},
});
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
var params = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: srcBucket,
Key: srcKey,
},
};
var downloader = client.downloadFile(params);
downloader.on('error', function(err) {
console.error("unable to download:", err.stack);
});
downloader.on('progress', function() {
console.log("progress", downloader.progressAmount, downloader.progressTotal);
});
downloader.on('end', function() {
console.log("done downloading");
im.convert(["/tmp/"+srcKey,
'-sampling-factor','4:2:0',
'-strip',
'-quality', '85',
'-interlace', 'JPEG',
'-colorspace','RGB',
"/tmp/"+srcKey],
function(err, stdout){
if (err) throw err;
console.log('stdout:', stdout);
//upload a file
var uploadparams = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: dstBucket,
Key: dstKey,
},
};
var uploader = client.uploadFile(uploadparams);
uploader.on('error', function(err) {
console.error("unable to upload:", err.stack);
});
uploader.on('progress', function() {
console.log("progress", uploader.progressMd5Amount,
uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
console.log("done uploading");
});
});
});
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
}
);
};
My intention is to have a Lambda function run on every insert of an image into the S3 bucket and to use the convert command from ImageMagick for optimisation. If I use the convert command on the command line, the image quality is good, but if I use it in the Lambda function, the image quality is bad. Could you help solve the problem?
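One difference worth checking, though this is only an observation and not a confirmed cause of the quality drop: the shell command writes to a separate output file ($2), whereas the Lambda code overwrites the downloaded file in /tmp in place. A sketch of the im.convert call that mirrors the shell invocation, using a hypothetical separate output path (srcKey comes from the handler above; the 'optimised-' prefix is made up for illustration):
// Sketch: write to a distinct /tmp path instead of overwriting the source,
// mirroring `convert $1 ... $2` from the command line.
var srcPath = '/tmp/' + srcKey;
var outPath = '/tmp/optimised-' + srcKey;
im.convert(
  [srcPath,
   '-sampling-factor', '4:2:0',
   '-strip',
   '-quality', '85',
   '-interlace', 'JPEG',
   '-colorspace', 'RGB',
   outPath],
  function(err, stdout) {
    if (err) return console.error('convert failed:', err);
    console.log('convert finished:', stdout);
    // Upload outPath (not srcPath) to the destination bucket from here.
  });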

s3 bucket upload not working

var async = require('async');
var AWS = require('aws-sdk');
var util = require('util');
var im = require('imagemagick');
var fs = require('fs');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
var s3 = require('s3');
var client = s3.createClient({
maxAsyncS3: 20, // this is the default
s3RetryCount: 3, // this is the default
s3RetryDelay: 1000, // this is the default
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: "xx",
secretAccessKey: "xx",
},
});
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
var params = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: srcBucket,
Key: srcKey,
},
};
var downloader = client.downloadFile(params);
downloader.on('error', function(err) {
console.error("unable to download:", err.stack);
});
downloader.on('progress', function() {
console.log("progress", downloader.progressAmount, downloader.progressTotal);
});
downloader.on('end', function() {
console.log("done downloading");
});
//upload a file
var uploadparams = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: dstBucket,
Key: dstKey,
},
};
var uploader = client.uploadFile(uploadparams);
uploader.on('error', function(err) {
console.error("unable to upload:", err.stack);
});
uploader.on('progress', function() {
console.log("progress", uploader.progressMd5Amount,
uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
console.log("done uploading");
});
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
}
);
};
I am trying to download a file from an S3 bucket and upload it to a different S3 bucket. I need to do some other conversions before uploading, so I just want to try downloading and uploading first. On execution, it says "done downloading", but I am unable to upload the file. I'm not sure what the problem is. I have followed the advice from https://github.com/andrewrk/node-s3-client/blob/master/README.md
Uploading is not working at all. Can you please help? Thanks.
You are trying to upload at the same time as you are downloading...
You need to start the upload inside the downloader.on('end', ...) handler, for example as in the sketch below.
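A sketch of that change, using the variable names from the question (client, params, srcKey, dstBucket, dstKey):
var downloader = client.downloadFile(params);
downloader.on('error', function(err) {
  console.error('unable to download:', err.stack);
});
downloader.on('end', function() {
  console.log('done downloading');
  // Only now does the file exist in /tmp, so start the upload here.
  var uploadparams = {
    localFile: '/tmp/' + srcKey,
    s3Params: {
      Bucket: dstBucket,
      Key: dstKey,
    },
  };
  var uploader = client.uploadFile(uploadparams);
  uploader.on('error', function(err) {
    console.error('unable to upload:', err.stack);
  });
  uploader.on('end', function() {
    console.log('done uploading');
  });
});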

AWS Lambda function exited before completing request

I'm trying to follow the AWS guide for S3 and Lambda here
http://docs.aws.amazon.com/lambda/latest/dg/with-s3-example.html
I'm at the manual testing stage, but I get "errorMessage": "Process exited before completing request" when I try to run the test.
In the log, the failure point is something to do with the async.waterfall code.
When I run the Lambda function, it does get my example image, resize it and puts it into the new S3 bucket. Then it looks like async is trying to find an undefined "nextTask" to run. I've never used async before, so I'm not sure how to fix this.
The cloudwatch log file gives me this
2016-09-16T18:36:44.011Z 836d0280-7c3c-11e6-933a-9b5b3a5e8dd8 TypeError: undefined is not a function
at /var/task/ResizeImages.js:98:13
at /var/task/node_modules/async/dist/async.js:486:20
at nextTask (/var/task/node_modules/async/dist/async.js:5008:33)
at Response.<anonymous> (/var/task/node_modules/async/dist/async.js:5015:17)
at Response.<anonymous> (/var/task/node_modules/async/dist/async.js:339:31)
at Response.<anonymous> (/var/task/node_modules/async/dist/async.js:847:20)
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:355:18)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:615:14)
To install async (and gm for ImageMagick), I just used the command
npm install async gm
inside my node_modules folder.
The example Node script I have used is below; line 98 is marked with a comment.
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "-resized";
var dstKey = "thumb-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
} // ------- LINE 98 -----------
);
};
Posting the answer for visibility.
The runtime was set to nodejs rather than node4.3.
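For context (as I understand it): the callback third argument is only passed to the handler on the node4.3 runtime and later; on the legacy nodejs (0.10) runtime it is undefined, which is why callback(null, "message") on line 98 fails with "undefined is not a function". On the old runtime, completion would have gone through the context object instead, roughly:
// Legacy nodejs (0.10) style: no callback argument, completion goes via context.
exports.handler = function(event, context) {
  // ... resize work ...
  context.succeed('message');   // instead of callback(null, 'message')
  // context.fail(err);         // instead of callback(err)
};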

In amazon lambda, resizing multiple thumbnail sizes in parallel async throws Error: Stream yields empty buffer

I've adapted the Amazon example of resizing a photo in lambda to create multiple thumbnail sizes and run in parallel.
My code runs fine locally in a few seconds, but in the Lambda cloud it will not run in parallel, throwing an error after resizing the first thumbnail size. If I switch it to run serially instead of in parallel, it takes around 60 seconds.
Why would running the resize code in parallel in Lambda cause the "Stream yields empty buffer" error? How can I increase performance so that I can create the sizes in a few seconds, but still get good value and efficiency out of Lambda in terms of processor cost?
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
var SIZES = [100, 320, 640];
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var dstBucket = srcBucket + "-resized";
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error('unable to infer image type for key ' + srcKey);
return context.done();
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
console.log('skipping non-image ' + srcKey);
return context.done();
}
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
console.error("Destination bucket must not match source bucket.");
return context.done();
}
// Download the image from S3
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
function(err, response){
if (err)
return console.error('unable to download image ' + err);
var contentType = response.ContentType;
var original = gm(response.Body);
original.size(function(err, size){
if(err)
return console.error(err);
//transform, and upload to a different S3 bucket.
async.each(SIZES,
function (max_size, callback) {
resize_photo(size, max_size, imageType, original, srcKey, dstBucket, contentType, callback);
},
function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket
);
}
context.done();
});
});
});
};
//wrap up variables into an options object
var resize_photo = function(size, max_size, imageType, original, srcKey, dstBucket, contentType, done) {
var dstKey = max_size + "_" + srcKey;
// transform, and upload to a different S3 bucket.
async.waterfall([
function transform(next) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
max_size / size.width,
max_size / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
original.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, buffer);
}
});
},
function upload(data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
}
], function (err) {
console.log('finished resizing ' + dstBucket + '/' + dstKey);
if (err) {
console.error(err);
} else {
console.log(
'Successfully resized ' + dstKey
);
}
done(err);
}
);
};
I just ran into the same issue tonight.
Although there may be something else you can do, I updated the memory of the Lambda task and the buffer issue went away.
I'm resizing images of around 2.1 MB and 5000x3000 pixels into 3 smaller sizes.
Duration: 11619.86 ms Billed Duration: 11700 ms Memory Size: 1024 MB
Max Memory Used: 582 MB
Hope that helps
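In addition to raising the memory, limiting how many resizes run at once may also keep the peak buffer usage down; this is a suggestion beyond the answer above, not a confirmed fix. Since the code already uses the async library, a sketch swapping async.each for async.eachLimit (names taken from the question's handler):
// Sketch: cap concurrency at 2 resizes at a time instead of all sizes at once,
// trading a little wall-clock time for a lower peak memory footprint.
async.eachLimit(SIZES, 2, function(max_size, callback) {
  resize_photo(size, max_size, imageType, original, srcKey, dstBucket, contentType, callback);
}, function(err) {
  if (err) {
    console.error('Unable to resize ' + srcBucket + ' due to an error: ' + err);
  } else {
    console.log('Successfully resized ' + srcBucket);
  }
  context.done();
});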
