I want to create a Lambda function that is invoked whenever someone uploads to the S3 bucket. The purpose of the function is to take the uploaded file and, if it is a video file (mp4), make a new file which is a preview of the uploaded one (using ffmpeg). The Lambda function is written in Node.js.
I took the code here for reference, but I am doing something wrong, because I get an error saying that no input was specified for setStartTime:
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var fs = require('fs');
var path = require('path');
var util = require('util');
var ffmpeg = require('fluent-ffmpeg');
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket;
var dstKey = "preview_" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the video type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the video type.");
return;
}
var videoType = typeMatch[1];
if (videoType != "mp4") {
callback('Unsupported video type: ${videoType}');
return;
}
// Download the video from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the video from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
console.log("response.Body:\n", response.Body);
ffmpeg(response.Body)
.setStartTime('00:00:03')
.setDuration('10') //.output('public/videos/test/test.mp4')
.toBuffer(videoType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
}
], function (err) {
if (err) {
console.error(
'Unable to modify ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully modify ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
}
);
};
So what am I doing wrong?
Related
My lambda function is not generating thumbnail as expected. Here's is my node code. I uploaded a .zip file with node modules.
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
// Maximum bounding box for generated thumbnails, in pixels.
var MAX_WIDTH = 250;
var MAX_HEIGHT = 250;
// get reference to S3 client
// Credentials come from the Lambda execution role (none hard-coded here).
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "thumb-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1].toLowerCase();
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
console.log('Width logggg' + size.width);
console.log('height logggg' + size.height)
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
}
);
};
I've just pasted the code from this link
I implemented everything correct in Lambda, S3 trigger, and bucket names. But in Cloudwatch metrics, I'm getting this error.
Please tell me what my mistake is and how to fix this error.
The stack trace shows that the error is happening in the file /var/task/index.js on line 57, column 38 so that's the best place to start.
If I had to guess though, if we're assuming the transform function is what is in index.js, it might be that size does not have a width property. Might be worth logging there to see what's going on.
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width, // <-- here
MAX_HEIGHT / size.height
);
Idk what gm(response.Body).size() expects for its callback but it would be good to log the params that get passed here.
I have a AWS Lambda function written in Node that resizes images when a new image is uploaded to my S3 bucket. The function works flawlessly 99% of the time. It's the 1% that I am wondering about...
On random occasions, the function execution times out at 30 seconds (that's the timeout we have set for our function to run). I don't think upping this timeout is going to save us anything here because it is just flat out hanging.
Below are the logs from CloudWatch. Above the blue line is the failure and 1.5 minutes later (below the blue line) is when it retried and succeeds.
This is the code that is run in the function, upon uploading of an image to S3.
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
var path = require('path');
// Maximum bounding box for generated thumbnails, in pixels.
var MAX_WIDTH = 345;
var MAX_HEIGHT = 345;
// get reference to S3 client
// Credentials come from the Lambda execution role (none hard-coded here).
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log(
'Reading options from event:\n',
util.inspect(event, { depth: 5 })
);
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(
event.Records[0].s3.object.key.replace(/\+/g, ' ')
);
var dstBucket = process.env.S3_BUCKET;
var dstKey = event.Records[0].s3.object.versionId + path.extname(srcKey);
console.log('Image version: ', dstKey);
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback('Source and destination buckets are the same.');
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback('Could not determine the image type.');
return;
}
var imageType = typeMatch[1];
if (imageType != 'jpg' && imageType != 'png') {
callback('Unsupported image type: ${imageType}');
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall(
[
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject(
{
Bucket: srcBucket,
Key: srcKey,
},
next
);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height).toBuffer(imageType, function(
err,
buffer
) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject(
{
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType,
},
next
);
},
],
function(err) {
if (err) {
console.error(
'Unable to resize ' +
srcBucket +
'/' +
srcKey +
' and upload to ' +
dstBucket +
'/' +
dstKey +
' due to an error: ' +
err
);
callback(err);
} else {
console.log(
'Successfully resized ' +
srcBucket +
'/' +
srcKey +
' and uploaded to ' +
dstBucket +
'/' +
dstKey
);
callback(null, 'Success');
}
}
);
};
Are there any settings or ways to avoid these random timeouts? Is this a "boot up" issue where the previous time this function was run was 5 hours prior so it was sitting idle before executing again?
I am trying to do image optimisation through the convert command. If I execute the convert command from the command line
convert $1 -sampling-factor 4:2:0 -strip -quality 85 -interlace JPEG -colorspace RGB $2
the image converts good.
But if I execute the Lambda function, the image quality is poor. Is there another way to get this solved? My Node.js function is:
var async = require('async');
var AWS = require('aws-sdk');
var util = require('util');
var im = require('imagemagick');
var fs = require('fs');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
var s3 = require('s3');
// NOTE(review): hard-coding AWS credentials in source is a security risk and
// is unnecessary inside Lambda -- omit accessKeyId/secretAccessKey so the SDK
// picks up the function's execution-role credentials instead.
var client = s3.createClient({
maxAsyncS3: 20, // this is the default
s3RetryCount: 3, // this is the default
s3RetryDelay: 1000, // this is the default
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: "xx",
secretAccessKey: "xx",
},
});
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback('Unsupported image type: ${imageType}');
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
var params = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: srcBucket,
Key: srcKey,
},
};
var downloader = client.downloadFile(params);
downloader.on('error', function(err) {
console.error("unable to download:", err.stack);
});
downloader.on('progress', function() {
console.log("progress", downloader.progressAmount, downloader.progressTotal);
});
downloader.on('end', function() {
console.log("done downloading");
im.convert(["/tmp/"+srcKey,
'-sampling-factor','4:2:0',
'-strip',
'-quality', '85',
'-interlace', 'JPEG',
'-colorspace','RGB',
"/tmp/"+srcKey],
function(err, stdout){
if (err) throw err;
console.log('stdout:', stdout);
//upload a file
var uploadparams = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: dstBucket,
Key: dstKey,
},
};
var uploader = client.uploadFile(uploadparams);
uploader.on('error', function(err) {
console.error("unable to upload:", err.stack);
});
uploader.on('progress', function() {
console.log("progress", uploader.progressMd5Amount,
uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
console.log("done uploading");
});
});
});
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + destBucket + '/' + destKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + destBucket + '/' + destKey
);
}
}
);
};
My intention is to have a Lambda function on every insert of the image into s3 bucket and use convert command from imagemagick for optimisation. If I use convert command in command line , image quality is good. But, if I use the convert command in Lambda function, image quality is bad. Could you help to solve the problem?
var async = require('async');
var AWS = require('aws-sdk');
var util = require('util');
var im = require('imagemagick');
var fs = require('fs');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
var s3 = require('s3');
// NOTE(review): hard-coding AWS credentials in source is a security risk and
// is unnecessary inside Lambda -- omit accessKeyId/secretAccessKey so the SDK
// picks up the function's execution-role credentials instead.
var client = s3.createClient({
maxAsyncS3: 20, // this is the default
s3RetryCount: 3, // this is the default
s3RetryDelay: 1000, // this is the default
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: "xx",
secretAccessKey: "xx",
},
});
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback('Unsupported image type: ${imageType}');
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
var params = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: srcBucket,
Key: srcKey,
},
};
var downloader = client.downloadFile(params);
downloader.on('error', function(err) {
console.error("unable to download:", err.stack);
});
downloader.on('progress', function() {
console.log("progress", downloader.progressAmount, downloader.progressTotal);
});
downloader.on('end', function() {
console.log("done downloading");
});
//upload a file
var uploadparams = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: dstBucket,
Key: dstKey,
},
};
var uploader = client.uploadFile(uploadparams);
uploader.on('error', function(err) {
console.error("unable to upload:", err.stack);
});
uploader.on('progress', function() {
console.log("progress", uploader.progressMd5Amount,
uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
console.log("done uploading");
});
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + destBucket + '/' + destKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + destBucket + '/' + destKey
);
}
}
);
};
I am trying to download a file from an S3 bucket and upload it to a different S3 bucket. I need to do some other conversions before uploading, so I just want to try downloading and uploading first. On execution, it says "done downloading", but I am unable to upload the file. I am not sure what the problem is. I have followed the advice from https://github.com/andrewrk/node-s3-client/blob/master/README.md
Uploading is not at all working. Can you please help. Thx.
You are trying to upload at the same time you are downloading...
You need to call upload inside downloader.on('end', method
I'm trying to follow the AWS guide for S3 and Lambda here
http://docs.aws.amazon.com/lambda/latest/dg/with-s3-example.html
I'm at the manual testing stage, but I get "errorMessage": "Process exited before completing request" when I try to run the test.
In the log the fail point is something to do with the async.waterfall code.
When I run the Lambda function, it does get my example image, resize it and puts it into the new S3 bucket. Then it looks like async is trying to find an undefined "nextTask" to run. I've never used async before, so I'm not sure how to fix this.
The cloudwatch log file gives me this
2016-09-16T18:36:44.011Z 836d0280-7c3c-11e6-933a-9b5b3a5e8dd8 TypeError: undefined is not a function
at /var/task/ResizeImages.js:98:13
at /var/task/node_modules/async/dist/async.js:486:20
at nextTask (/var/task/node_modules/async/dist/async.js:5008:33)
at Response.<anonymous> (/var/task/node_modules/async/dist/async.js:5015:17)
at Response.<anonymous> (/var/task/node_modules/async/dist/async.js:339:31)
at Response.<anonymous> (/var/task/node_modules/async/dist/async.js:847:20)
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:355:18)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:615:14)
To install async (and ImageMagick), I just used the command
npm install async gm
inside my node_modules folder.
The example node script I have used is here, line 98 is commented.
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
// Maximum bounding box for generated thumbnails, in pixels.
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
// Credentials come from the Lambda execution role (none hard-coded here).
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "-resized";
var dstKey = "thumb-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback('Unsupported image type: ${imageType}');
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
} // ------- LINE 98 -----------
);
};
Posting the answer for visibility.
The runtime was set to nodejs rather than node4.3