I am using a Lambda function to take an image from a bucket and duplicate it into another bucket, then take the original image, resize it, and copy the resized version into the other bucket as well. After this is done, the original photo should be deleted from the original bucket.
Start:
Bucket1 = image.jpg
Bucket2 = empty
End:
Bucket1 = empty
Bucket2 = imagecopy.jpg, imageresized.jpg
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
},
function copyImage(next) {
s3.copyObject({
CopySource: srcBucket + '/' + srcKey,
Bucket: dstBucket,
Key: srcKey
}, next)
},
function deleteOrig(next) {
s3.deleteObject({
Bucket: srcBucket,
Key: srcKey
})
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
}
);
};
LOGS:
START RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b Version: $LATEST
2019-01-31T10:39:52.312Z b9ed83e2-4048-4434-bb5c-adf511b6200b Reading options from event:
{ Records:
[ { eventVersion: '2.0',
eventSource: 'aws:s3',
awsRegion: 'us-west-2',
eventTime: '1970-01-01T00:00:00.000Z',
eventName: 'ObjectCreated:Put',
userIdentity: { principalId: 'AIDAJDPLRKLG7UEXAMPLE' },
requestParameters: { sourceIPAddress: '127.0.0.1' },
responseElements:
{ 'x-amz-request-id': 'C3D13FE58DE4C810',
'x-amz-id-2': 'FMyUVURIY8/IgAtTv8xRjskZQpcIZ9KG4V5Wp6S7S/JRWeUWerMUE5JgHvANOjpD' },
s3:
{ s3SchemaVersion: '1.0',
configurationId: 'testConfigRule',
bucket:
{ name: 'ciansource',
ownerIdentity: { principalId: 'A3NL1KOZZKExample' },
arn: 'arn:aws:s3:::ciansource' },
object:
{ key: 'football.jpg',
size: 1024,
eTag: 'd41d8cd98f00b204e9800998ecf8427e',
versionId: '096fKKXTRTtl3on89fVO.nfljtsv6qko' } } } ] }
2019-01-31T10:39:52.617Z b9ed83e2-4048-4434-bb5c-adf511b6200b TypeError: callback.call is not a function
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:364:18)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:683:14)
at Request.transition (/var/runtime/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/var/runtime/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /var/runtime/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:38:9)
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:685:12)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:115:18)
END RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b
REPORT RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b Duration: 344.41 ms Billed Duration: 400 ms Memory Size: 1024 MB Max Memory Used: 39 MB
RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b Process exited before completing request
Thanks for adding the log. It is difficult to tell what is going wrong just by reading your code, so I refactored it using the async/await keywords from ES2017 and Node v8.
I tested this code on my laptop and it works as expected.
In addition, it has the following advantages:
it is easier to read (developers spend much more time reading code than writing it)
it is easier to test, as each function can be tested separately (see the example after the code below)
it is easier to reuse, as each function can be called from other functions or modules.
Give it a try.
If, for some reason, you cannot use ES2017 or Node v8, I would suggest adding a console.log() call to each of your waterfall functions to understand where it breaks.
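A minimal sketch of that fallback, reusing the variable names from your handler (function bodies trimmed to the logging):
async.waterfall([
    function download(next) {
        console.log('step: download', srcBucket + '/' + srcKey);
        s3.getObject({ Bucket: srcBucket, Key: srcKey }, next);
    },
    function transform(response, next) {
        console.log('step: transform, content type =', response.ContentType);
        // ... your existing gm() resize code, ending with next(null, response.ContentType, buffer) ...
    },
    function upload(contentType, data, next) {
        console.log('step: upload to', dstBucket + '/' + dstKey);
        s3.putObject({ Bucket: dstBucket, Key: dstKey, Body: data, ContentType: contentType }, next);
    }
    // ... copyImage and deleteOrig, each starting with its own console.log ...
], function (err) {
    if (err) console.error('waterfall stopped with error:', err);
});
The last "step: ..." line in CloudWatch then tells you which step was running when the error occurred.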
// dependencies
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true }); // Enable ImageMagick integration.
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
var s3 = new AWS.S3();
// added for testing on laptop - you NEED TO delete this to run it from Lambda
const event = {
"Records" : [
{
"s3" : {
"bucket" : {
"name" : "test-so-sst"
},
"object" : {
"key" : "image.jpg"
}
}
}
]
}
async function download(srcBucket, srcKey) {
return new Promise((resolve, reject) => {
s3.getObject({
Bucket: srcBucket,
Key: srcKey
}, (error, data) => {
if (error) {
console.log(error); reject(error);
} else {
resolve(data);
}
});
});
};
async function transform(imageType, image) {
return new Promise((resolve, reject) => {
gm(image).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
reject(err);
} else {
resolve(buffer);
}
});
});
});
}
async function upload(dstBucket, dstKey, contentType, data) {
return new Promise((resolve, reject) => {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
}, (error, data) => {
if (error) {
console.log(error); reject(error);
} else {
resolve(data);
}
});
});
}
async function copyImage(srcBucket, srcKey, dstBucket) {
return new Promise((resolve, reject) => {
s3.copyObject({
CopySource: srcBucket + '/' + srcKey,
Bucket: dstBucket,
Key: srcKey
}, (error, data) => {
if (error) {
console.log(error); reject(error);
} else {
resolve(data);
}
});
});
};
async function deleteOrig(srcBucket, srcKey) {
return new Promise((resolve, reject) => {
s3.deleteObject({
Bucket: srcBucket,
Key: srcKey
}, (error, data) => {
if (error) {
console.log(error); reject(error);
} else {
resolve(data);
}
});
});
};
exports.handler = async function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", JSON.stringify(event, null,2));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "-resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
try {
let responseDownload = await download(srcBucket, srcKey);
let responseTransform = await transform(imageType, responseDownload.Body);
let responseUpload = await upload(dstBucket, dstKey, responseDownload.ContentType, responseTransform);
let responseCopy = await copyImage(srcBucket, srcKey, dstBucket);
let responseDelete = await deleteOrig(srcBucket, srcKey);
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
} catch (error) {
const message = 'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + error;
console.error(message);
callback(error, message);
return;
}
callback(null, "success");
};
// to test from my laptop - you can safely remove this before deploying to Lambda
exports.handler(event, null, (error, message) => {
if (error) console.log(error);
console.log(message);
})
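To illustrate the point above that each function can be tested separately, here is a small, hypothetical stand-alone check of just the transform() helper (append it to the same file, or export transform and require it from a test module; the local file name is only an example):
const fs = require('fs');
async function testTransform() {
    const image = fs.readFileSync('./football.jpg');      // any local jpg will do
    const thumbnail = await transform('jpg', image);       // the helper defined above
    fs.writeFileSync('./football-thumb.jpg', thumbnail);
    console.log('resized thumbnail is', thumbnail.length, 'bytes');
}
testTransform().catch(console.error);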
Related
I am trying to do image optimisation with the convert command. If I execute the convert command from the command line,
convert $1 -sampling-factor 4:2:0 -strip -quality 85 -interlace JPEG -colorspace RGB $2
the image converts fine.
But if I execute the Lambda function, the image quality is poor. Is there any other way to get this solved? My Node.js function is:
var async = require('async');
var AWS = require('aws-sdk');
var util = require('util');
var im = require('imagemagick');
var fs = require('fs');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
var s3 = require('s3');
var client = s3.createClient({
maxAsyncS3: 20, // this is the default
s3RetryCount: 3, // this is the default
s3RetryDelay: 1000, // this is the default
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: "xx",
secretAccessKey: "xx",
},
});
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
var params = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: srcBucket,
Key: srcKey,
},
};
var downloader = client.downloadFile(params);
downloader.on('error', function(err) {
console.error("unable to download:", err.stack);
});
downloader.on('progress', function() {
console.log("progress", downloader.progressAmount, downloader.progressTotal);
});
downloader.on('end', function() {
console.log("done downloading");
im.convert(["/tmp/"+srcKey,
'-sampling-factor','4:2:0',
'-strip',
'-quality', '85',
'-interlace', 'JPEG',
'-colorspace','RGB',
"/tmp/"+srcKey],
function(err, stdout){
if (err) throw err;
console.log('stdout:', stdout);
//upload a file
var uploadparams = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: dstBucket,
Key: dstKey,
},
};
var uploader = client.uploadFile(uploadparams);
uploader.on('error', function(err) {
console.error("unable to upload:", err.stack);
});
uploader.on('progress', function() {
console.log("progress", uploader.progressMd5Amount,
uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
console.log("done uploading");
});
});
});
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
}
);
};
My intention is to have a Lambda function run on every insert of an image into the S3 bucket and use the convert command from ImageMagick for optimisation. If I use the convert command on the command line, the image quality is good. But if I use the convert command in the Lambda function, the image quality is bad. Could you help to solve the problem?
var async = require('async');
var AWS = require('aws-sdk');
var util = require('util');
var im = require('imagemagick');
var fs = require('fs');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
var s3 = require('s3');
var client = s3.createClient({
maxAsyncS3: 20, // this is the default
s3RetryCount: 3, // this is the default
s3RetryDelay: 1000, // this is the default
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: "xx",
secretAccessKey: "xx",
},
});
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
var params = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: srcBucket,
Key: srcKey,
},
};
var downloader = client.downloadFile(params);
downloader.on('error', function(err) {
console.error("unable to download:", err.stack);
});
downloader.on('progress', function() {
console.log("progress", downloader.progressAmount, downloader.progressTotal);
});
downloader.on('end', function() {
console.log("done downloading");
});
//upload a file
var uploadparams = {
localFile: "/tmp/"+srcKey,
s3Params: {
Bucket: dstBucket,
Key: dstKey,
},
};
var uploader = client.uploadFile(uploadparams);
uploader.on('error', function(err) {
console.error("unable to upload:", err.stack);
});
uploader.on('progress', function() {
console.log("progress", uploader.progressMd5Amount,
uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
console.log("done uploading");
});
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
}
);
};
I am trying to download a file from an S3 bucket and upload it to a different S3 bucket. I need to do some other conversions before uploading, so I just want to try downloading and uploading first. On execution, it says "done downloading", but I am unable to upload the file. Not sure what the problem is. I have followed the advice from https://github.com/andrewrk/node-s3-client/blob/master/README.md
Uploading is not working at all. Can you please help? Thanks.
You are trying to upload at the same time you are downloading.
You need to start the upload inside the downloader.on('end', ...) handler, as sketched below.
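A minimal sketch of that change inside your download step, reusing the variable names from the question (client, srcBucket, srcKey, dstBucket, dstKey, next); everything else stays the same:
function download(next) {
    var downloader = client.downloadFile({
        localFile: "/tmp/" + srcKey,
        s3Params: { Bucket: srcBucket, Key: srcKey }
    });
    downloader.on('error', function(err) {
        console.error("unable to download:", err.stack);
        next(err);
    });
    downloader.on('end', function() {
        console.log("done downloading");
        // the local file is only complete here, so start the upload now
        var uploader = client.uploadFile({
            localFile: "/tmp/" + srcKey,
            s3Params: { Bucket: dstBucket, Key: dstKey }
        });
        uploader.on('error', function(err) {
            console.error("unable to upload:", err.stack);
            next(err);
        });
        uploader.on('end', function() {
            console.log("done uploading");
            next(); // tell the waterfall this step is finished
        });
    });
}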
I want to create a Lambda function that is invoked whenever someone uploads to the S3 bucket. The purpose of the function is to take the uploaded file and, if it is a video file (mp4), make a new file which is a preview of the original (using ffmpeg). The Lambda function is written in Node.js.
I took the code here for reference, but I am doing something wrong, because I get an error saying that no input was specified for SetStartTime:
//dependecies
var async = require('async');
var AWS = require('aws-sdk');
var util = require('util');
var ffmpeg = require('fluent-ffmpeg');
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket;
var dstKey = "preview_" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the video type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the video type.");
return;
}
var videoType = typeMatch[1];
if (videoType != "mp4") {
callback(`Unsupported video type: ${videoType}`);
return;
}
// Download the video from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the video from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
console.log("response.Body:\n", response.Body);
ffmpeg(response.Body)
.setStartTime('00:00:03')
.setDuration('10') //.output('public/videos/test/test.mp4')
.toBuffer(videoType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
}
], function (err) {
if (err) {
console.error(
'Unable to modify ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully modify ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
}
);
};
So what am I doing wrong?
I'm trying to follow the AWS guide for S3 and Lambda here
http://docs.aws.amazon.com/lambda/latest/dg/with-s3-example.html
I'm at the manual testing stage, but I get "errorMessage": "Process exited before completing request" when I try to run the test.
In the log, the fail point is something to do with the async.waterfall code.
When I run the Lambda function, it does get my example image, resizes it, and puts it into the new S3 bucket. Then it looks like async is trying to find an undefined "nextTask" to run. I've never used async before, so I'm not sure how to fix this.
The cloudwatch log file gives me this
2016-09-16T18:36:44.011Z 836d0280-7c3c-11e6-933a-9b5b3a5e8dd8 TypeError: undefined is not a function
at /var/task/ResizeImages.js:98:13
at /var/task/node_modules/async/dist/async.js:486:20
at nextTask (/var/task/node_modules/async/dist/async.js:5008:33)
at Response.<anonymous> (/var/task/node_modules/async/dist/async.js:5015:17)
at Response.<anonymous> (/var/task/node_modules/async/dist/async.js:339:31)
at Response.<anonymous> (/var/task/node_modules/async/dist/async.js:847:20)
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:355:18)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:615:14)
To install async (and the gm ImageMagick wrapper), I just used the command
npm install async gm
inside my node_modules folder.
The example node script I have used is below; line 98 is marked with a comment.
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "-resized";
var dstKey = "thumb-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
} // ------- LINE 98 -----------
);
};
Posting the answer for visibility.
The runtime was set to nodejs (the legacy Node.js 0.10 runtime) rather than node4.3.
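For context: the legacy nodejs (0.10) runtime only passes (event, context) to the handler, so the third callback argument is undefined and calling it at line 98 throws "undefined is not a function"; the callback parameter was only introduced with the Node.js 4.3 runtime. The two handler styles, shown side by side for comparison (not meant to coexist in one file):
// Node.js 0.10 ("nodejs") style - no callback parameter; finish via the context object
exports.handler = function(event, context) {
    // ... resize work ...
    context.done(null, "message");   // or context.succeed(...) / context.fail(err)
};
// Node.js 4.3+ ("node4.3") style - what the tutorial code above assumes
exports.handler = function(event, context, callback) {
    // ... resize work ...
    callback(null, "message");
};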
I have the following Node.js code which, as it is, gets an image from AWS, resizes it into 4 different sizes and then saves it back into the AWS bucket in separate folders. However, I need to write it so that it can also be run in the dev environment. How could I write this so that, depending on the input (a local file on a Vagrant machine, or an object on the AWS server), different functions are called (what do I listen to)? It is worth noting that I am using AWS's new Lambda service.
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true });
var util = require('util');
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var _800px = {
width: 800,
dstnKey: srcKey,
destinationPath: "large"
};
var _500px = {
width: 500,
dstnKey: srcKey,
destinationPath: "medium"
};
var _200px = {
width: 200,
dstnKey: srcKey,
destinationPath: "small"
};
var _45px = {
width: 45,
dstnKey: srcKey,
destinationPath: "thumbnail"
};
var _sizesArray = [_800px, _500px, _200px, _45px];
var len = _sizesArray.length;
console.log(len);
console.log(srcBucket);
console.log(srcKey);
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error('unable to infer image type for key ' + srcKey);
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
console.log('skipping non-image ' + srcKey);
return;
}
// Download the image from S3, transform, and upload to same S3 bucket but different folders.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
for (var i = 0; i<len; i++) {
// Transform the image buffer in memory.
gm(response.Body, srcKey)
.resize(_sizesArray[i].width)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
}
},
function upload(contentType, data, next) {
for (var i = 0; i<len; i++) {
// Stream the transformed image to a different folder.
s3.putObject({
Bucket: srcBucket,
Key: "dst/" + _sizesArray[i].destinationPath + "/" + _sizesArray[i].dstnKey,
Body: data,
ContentType: contentType
},
next);
}
}
], function (err) {
if (err) {
console.error(
'---->Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + srcBucket + '/dst' +
' due to an error: ' + err
);
} else {
console.log(
'---->Successfully resized ' + srcBucket +
' and uploaded to' + srcBucket + "/dst"
);
}
context.done();
}
);
};
I would go for creating two providers (modules), i.e. fsProvider and awsProvider, each with download, transform and upload methods. Then, in the handler, I would decide which provider to use depending on process.env.NODE_ENV (development or production), as sketched below.
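A rough sketch of that idea; the provider module names and their signatures are illustrative (they would have to be written), not an existing API:
// providers/awsProvider.js would wrap the S3 calls (s3.getObject / s3.putObject);
// providers/fsProvider.js would wrap the local filesystem (fs.readFile / fs.writeFile);
// both would export the same download(source, key, callback) and upload(dest, key, body, callback).
var gm = require('gm').subClass({ imageMagick: true });
var provider = process.env.NODE_ENV === 'production'
    ? require('./providers/awsProvider')   // hypothetical module, to be written
    : require('./providers/fsProvider');   // hypothetical module, to be written
exports.handler = function(event, context) {
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;
    provider.download(srcBucket, srcKey, function(err, image) {
        if (err) return context.done(err);
        gm(image).resize(800).toBuffer('jpg', function(err, buffer) {
            if (err) return context.done(err);
            provider.upload(srcBucket, 'dst/large/' + srcKey, buffer, function(err) {
                context.done(err);
            });
        });
    });
};
In development, the fsProvider variant can be fed a fake event whose bucket/key point at local paths, so the same handler code runs unchanged on the Vagrant machine.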