Promises or Async with Node.js

I have a large amount of code which gets an image from an S3 bucket, saves it to a temporary file on Lambda, resizes it to 4 different sizes, saves the resized copies into different folders according to size, and then puts the images back into the S3 bucket, again in different folders.
However, when running on Lambda I have to call context.done() at the end of the whole process, otherwise the context stays alive until Lambda times out.
So I need to call context.done() when upload returns for the last time.
Looking at the two options, async and promises, which would likely need less refactoring of my code to work?
// dependencies
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true });
var fs = require("fs");
// get reference to S3 client
var s3 = new AWS.S3();
var _800px = {
width: 800,
destinationPath: "large"
};
var _500px = {
width: 500,
destinationPath: "medium"
};
var _200px = {
width: 200,
destinationPath: "small"
};
var _45px = {
width: 45,
destinationPath: "thumbnail"
};
var _sizesArray = [_800px, _500px, _200px, _45px];
var len = _sizesArray.length;
// module to be exported when in production
exports.AwsHandler = function(event, context) {
// Read options from the event.
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var dstnFolder = "/tmp";
// function to determine paths
function _filePath (directory, i) {
if ( directory === false ) {
return "dst/" + _sizesArray[i].destinationPath + "/" + srcKey;
} else if ( directory === true ) {
return dstnFolder + "/" + _sizesArray[i].destinationPath + "/" + srcKey;
}
};
for ( var i = 0; i<len; i++) {
fs.mkdir("/tmp" + "/" + _sizesArray[i].destinationPath, function (err) {
if (err) {
console.log(err);
}
});
};
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error('unable to infer image type for key ' + srcKey);
return;
};
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
console.log('skipping non-image ' + srcKey);
return;
};
function download () {
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
function (err, response) {
if (err) {
console.error(err);
}
fs.writeFile("/tmp" + "/" + srcKey, response.Body, function (err) {
transform();
})
}
);
};
function transform () {
var _Key,
_Size;
for ( var i = 0; i<len; i++ ) {
// define path for image write
_Key = _filePath (true, i);
// define sizes to resize to
_Size = _sizesArray[i].width;
// resize images
gm("/tmp/" + srcKey)
.resize(_Size)
.write(_Key, function (err) {
if (err) {
return handle(err);
}
if (!err) {
// get the result of write
var readPath = this.outname;
var iniPath = this.outname.slice(4);
var writePath = "dst".concat(iniPath);
read(err, readPath, writePath, upload);
}
});
};
};
function read (err, readPath, writePath, callback) {
// read file from temp directory
fs.readFile(readPath, function (err, data) {
if (err) {
console.log("NO READY FILE FOR YOU!!!");
console.error(err);
}
callback(data, writePath);
});
};
function upload (data, path) {
// upload images to s3 bucket
s3.putObject({
Bucket: srcBucket,
Key: path,
Body: data,
ContentType: data.type
},
function (err) {
if (err) {
console.error(err);
}
console.log("Uploaded with success!");
});
}
download();
};

Take a look at how they use Q in this example.
Your code will end up very similar to
download()
.then(transform)
.then(read)
.then(upload)
.catch(function (error) {
// Handle any error from all above steps
console.error(error);
})
.done(function() {
console.log('Finished processing image');
context.done();
});
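For that chain to work, each of those functions needs to return a promise. As a minimal sketch (reusing the s3, fs, srcBucket and srcKey from your code), the download step could be wrapped with Q like this:
var Q = require('q');
function download () {
    // Q.ninvoke wraps a Node-style callback method and returns a promise
    return Q.ninvoke(s3, 'getObject', { Bucket: srcBucket, Key: srcKey })
        .then(function (response) {
            // write the original to /tmp; the next .then() in the chain runs once this resolves
            return Q.nfcall(fs.writeFile, "/tmp/" + srcKey, response.Body);
        });
}
The other steps (transform, read, upload) would be wrapped the same way, each returning the promise of its last asynchronous call.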
You could also take a look at async and use it as shown in this other example.

Related

Process exited before request Completed

I am using a Lambda function to take an image from a bucket and duplicate it into another bucket. It then takes the original image, resizes it and copies the resized version into the other bucket as well. After this is done, it deletes the original photo from the original bucket.
Start:
Bucket1 = image.jpg
Bucket2 = empty
END:
Bucket1 = empty
Bucket2 = imagecopy.jpg, imageresized.jpg
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback('Unsupported image type: ${imageType}');
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
gm(response.Body).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
},
next);
},
function copyImage(next) {
s3.copyObject({
CopySource: srcBucket + '/' + srcKey,
Bucket: dstBucket,
Key: srcKey
}, next)
},
function deleteOrig(next) {
s3.deleteObject({
Bucket: srcBucket,
Key: srcKey
})
}
], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
}
callback(null, "message");
}
);
};
LOGS:
START RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b Version: $LATEST
2019-01-31T10:39:52.312Z b9ed83e2-4048-4434-bb5c-adf511b6200b Reading options from event:
{ Records:
[ { eventVersion: '2.0',
eventSource: 'aws:s3',
awsRegion: 'us-west-2',
eventTime: '1970-01-01T00:00:00.000Z',
eventName: 'ObjectCreated:Put',
userIdentity: { principalId: 'AIDAJDPLRKLG7UEXAMPLE' },
requestParameters: { sourceIPAddress: '127.0.0.1' },
responseElements:
{ 'x-amz-request-id': 'C3D13FE58DE4C810',
'x-amz-id-2': 'FMyUVURIY8/IgAtTv8xRjskZQpcIZ9KG4V5Wp6S7S/JRWeUWerMUE5JgHvANOjpD' },
s3:
{ s3SchemaVersion: '1.0',
configurationId: 'testConfigRule',
bucket:
{ name: 'ciansource',
ownerIdentity: { principalId: 'A3NL1KOZZKExample' },
arn: 'arn:aws:s3:::ciansource' },
object:
{ key: 'football.jpg',
size: 1024,
eTag: 'd41d8cd98f00b204e9800998ecf8427e',
versionId: '096fKKXTRTtl3on89fVO.nfljtsv6qko' } } } ] }
2019-01-31T10:39:52.617Z b9ed83e2-4048-4434-bb5c-adf511b6200b TypeError: callback.call is not a function
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:364:18)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:683:14)
at Request.transition (/var/runtime/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/var/runtime/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /var/runtime/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:38:9)
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:685:12)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:115:18)
END RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b
REPORT RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b Duration: 344.41 ms Billed Duration: 400 ms Memory Size: 1024 MB Max Memory Used: 39 MB
RequestId: b9ed83e2-4048-4434-bb5c-adf511b6200b Process exited before completing request
Thanks for adding the log. It is difficult to say just by reading your code, so I refactored it using the new async/await keywords from ES2017 and Node v8.
I tested this code on my laptop and it works as expected.
In addition, it has the following advantages:
it is easier to read (developers spend much more time reading code than writing it)
it is easier to test, as each function can be tested separately
it is easier to reuse, as each function can be called from other functions or modules.
Give it a try.
If, for some reason, you cannot use ES2017 or Node v8, I would suggest adding console.log() calls in each of your waterfall functions to understand where it breaks.
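For example (a sketch, not part of the refactor below), instrumenting one of your waterfall steps would look like this; in async.waterfall each step must receive a callback function as its last argument, so logging what actually arrives shows where the chain goes wrong:
function copyImage(next) {
    // 'next' should be a function here; if it is an object, the previous step passed extra data
    console.log('copyImage, typeof next =', typeof next);
    s3.copyObject({
        CopySource: srcBucket + '/' + srcKey,
        Bucket: dstBucket,
        Key: srcKey
    }, next);
}
Here is the full async/await version: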
// dependencies
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true }); // Enable ImageMagick integration.
// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;
// get reference to S3 client
var s3 = new AWS.S3();
// added for testing on laptop - you NEED TO delete this to run it from Lambda
const event = {
"Records" : [
{
"s3" : {
"bucket" : {
"name" : "test-so-sst"
},
"object" : {
"key" : "image.jpg"
}
}
}
]
}
async function download(srcBucket, srcKey) {
return new Promise((resolve, reject) => {
s3.getObject({
Bucket: srcBucket,
Key: srcKey
}, (error, data) => {
if (error) {
console.log(error); reject(error);
} else {
resolve(data);
}
});
});
};
async function transform(imageType, image) {
return new Promise((resolve, reject) => {
gm(image).size(function(err, size) {
// Infer the scaling factor to avoid stretching the image unnaturally.
var scalingFactor = Math.min(
MAX_WIDTH / size.width,
MAX_HEIGHT / size.height
);
var width = scalingFactor * size.width;
var height = scalingFactor * size.height;
// Transform the image buffer in memory.
this.resize(width, height)
.toBuffer(imageType, function(err, buffer) {
if (err) {
reject(err);
} else {
resolve(buffer);
}
});
});
});
}
async function upload(dstBucket, dstKey, contentType, data) {
return new Promise((resolve, reject) => {
// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: dstBucket,
Key: dstKey,
Body: data,
ContentType: contentType
}, (error, data) => {
if (error) {
console.log(error); reject(error);
} else {
resolve(data);
}
});
});
}
async function copyImage(srcBucket, srcKey, dstBucket) {
return new Promise((resolve, reject) => {
s3.copyObject({
CopySource: srcBucket + '/' + srcKey,
Bucket: dstBucket,
Key: srcKey
}, (error, data) => {
if (error) {
console.log(error); reject(error);
} else {
resolve(data);
}
});
});
};
async function deleteOrig(srcBucket, srcKey) {
return new Promise((resolve, reject) => {
s3.deleteObject({
Bucket: srcBucket,
Key: srcKey
}, (error, data) => {
if (error) {
console.log(error); reject(error);
} else {
resolve(data);
}
});
});
};
exports.handler = async function(event, context, callback) {
// Read options from the event.
console.log("Reading options from event:\n", JSON.stringify(event, null,2));
var srcBucket = event.Records[0].s3.bucket.name;
// Object key may have spaces or unicode non-ASCII characters.
var srcKey =
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
var dstBucket = srcBucket + "-resized";
var dstKey = "resized-" + srcKey;
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
callback("Source and destination buckets are the same.");
return;
}
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
callback(`Unsupported image type: ${imageType}`);
return;
}
// Download the image from S3, transform, and upload to a different S3 bucket.
try {
let responseDownload = await download(srcBucket, srcKey);
let responseTransform = await transform(imageType, responseDownload.Body);
let responseUpload = await upload(dstBucket, dstKey, responseDownload.ContentType, responseTransform);
let responseCopy = await copyImage(srcBucket, srcKey, dstBucket);
let responseDelete = await deleteOrig(srcBucket, srcKey);
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket + '/' + dstKey
);
} catch (error) {
const message = 'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' + dstKey +
' due to an error: ' + error;
console.error(message);
callback(error, message);
return; // avoid calling callback a second time below
}
callback(null, "success");
};
// to test from my laptop - you can safely remove this before deploying to Lambda
exports.handler(event, null, (error, message) => {
if (error) console.log(error);
console.log(message);
})
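As a side note (not required for the code above to work), aws-sdk v2 also exposes a .promise() method on every request object, so each wrapper could be shortened; a sketch for download():
async function download(srcBucket, srcKey) {
    // .promise() turns the request into a native Promise; await either yields
    // the S3 response or throws the error, which the handler's try/catch picks up
    return s3.getObject({ Bucket: srcBucket, Key: srcKey }).promise();
}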

Error: Stream yields empty buffer when reducing picture quality with gm on AWS Lambda

I've read all the other topics and tried a few answers, but I can't seem to figure out why I get this error.
My code gets the uploaded picture from an S3 bucket, reduces its quality and puts it in a second bucket. Plain and simple.
With small/medium images everything works just fine, but if I upload something over 2 MB (more or less) I get the error in the title.
My Lambda function has 128 MB of memory and a 3 minute timeout; here is the code:
const gm = require('gm').subClass({imageMagick: true});
const AWS = require('aws-sdk');
const async = require('async');
const S3 = new AWS.S3();
exports.handler = (event, context, callback) => {
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var dstBucket = "destinationbucket";
var dstKey = "resized-" + srcKey;
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
callback("Could not determine the image type.");
return;
}
var imageType = typeMatch[1].toLowerCase();
if (imageType != "jpg" && imageType != "png" && imageType != "jpeg") {
callback('Unsupported image type: ${imageType}');
return;
}
async.waterfall([
function download(next) {
S3.getObject({Bucket : srcBucket, Key : srcKey}, next);
},
function transform(response, next) {
var img_quality_reduced = gm(response.Body);
img_quality_reduced.quality(75).toBuffer(function( error, buffer )
{
if( error ) { console.log( error ); return; }
next(null, response.ContentType, buffer);
}
);
},
function upload(contentType, data, next) {
S3.putObject({Bucket: dstBucket, Key: dstKey, Body: data}, next);
},
function ending(next) {
console.log('got to ending');
context.done();
}
], function (err) {
console.log(err);
context.done();
});
};
Any idea why this is happening? I have uploaded async, gm and graphicsmagick to Lambda (as a zip file), all installed through npm.

Node resize image and upload to AWS

I'm relatively new to node, and want to write a module that takes an image from an S3 bucket, resizes it and saves it to a temporary directory on Amazon's new Lambda service and then uploads the images back to the bucket.
When I run the code, none of my functions seem to be called (download, transform and upload). I am using tmp to create the temporary directory and graphicsMagick to resize the image.
What is wrong with my code?
I have defined the dependencies and the array outside of the module, because I have another which depends on these.
// dependencies
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true });
var fs = require("fs");
var tmp = require("tmp");
// get reference to S3 client
var s3 = new AWS.S3();
var _800px = {
width: 800,
destinationPath: "large"
};
var _500px = {
width: 500,
destinationPath: "medium"
};
var _200px = {
width: 200,
destinationPath: "small"
};
var _45px = {
width: 45,
destinationPath: "thumbnail"
};
var _sizesArray = [_800px, _500px, _200px, _45px];
var len = _sizesArray.length;
exports.AwsHandler = function(event) {
// Read options from the event.
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var dstnKey = srcKey;
// create temporary directory
var tmpobj = tmp.dirSync();
// function to determine paths
function _filePath (directory, i) {
if (!directory) {
return "dst/" + _sizesArray[i].destinationPath + "/" + dstnKey;
} else {
return directory + "/dst/" + _sizesArray[i].destinationPath + "/" + dstnKey;
}
};
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error('unable to infer image type for key ' + srcKey);
return;
};
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
console.log('skipping non-image ' + srcKey);
return;
};
(function resizeImage () {
function download () {
console.log("started!");
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
function (err, response) {
if (err) {
console.error(err);
}
// call transform if successful
transform (response);
}
);
};
function transform (response) {
for ( var i = 0; i<len; i++ ) {
// define path for image write
var _Key = _filePath (tmpobj, i);
// resize images
gm(response.Body, srcKey)
.resize(_sizesArray[i].width)
.write(_Key, function (err) {
if (err) {
console.error(err);
}
upLoad ();
});
}
};
function upLoad () {
for ( var i = 0; i<len; i++ ) {
var readPath = _filePath (tmpobj, i);
var writePath = _filePath (i);
// read file from temp directory
fs.readFile(readPath, function (err, data) {
if (err) {
console.error(err);
}
// upload images to s3 bucket
s3.putObject({
Bucket: srcBucket,
Key: writePath,
Body: data,
ContentType: data.type
},
function (err) {
if (err) {
console.error(err);
}
console.log("Uploaded with success!");
});
})
}
// Manual cleanup of temporary directory
tmpobj.removeCallback();
};
}());
};
Here's a partial improvement; note the use of the async library. You will still have issues in upLoad(), because you are firing four asynchronous calls immediately (in the for loop) and there's no easy way to know when they are all done. (The easy way is to rewrite that function to use the async library as well, e.g. async.forEach; a sketch of that rewrite follows the code below.)
// dependencies
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true });
var fs = require("fs");
var tmp = require("tmp");
var async = require("async");
// get reference to S3 client
var s3 = new AWS.S3();
var _800px = {
width: 800,
destinationPath: "large"
};
var _500px = {
width: 500,
destinationPath: "medium"
};
var _200px = {
width: 200,
destinationPath: "small"
};
var _45px = {
width: 45,
destinationPath: "thumbnail"
};
var _sizesArray = [_800px, _500px, _200px, _45px];
var len = _sizesArray.length;
exports.AwsHandler = function(event) {
// Read options from the event.
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var dstnKey = srcKey;
// create temporary directory
var tmpobj = tmp.dirSync();
// function to determine paths
function _filePath (directory, i) {
if (!directory) {
return "dst/" + _sizesArray[i].destinationPath + "/" + dstnKey;
} else {
return directory + "/dst/" + _sizesArray[i].destinationPath + "/" + dstnKey;
}
};
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error('unable to infer image type for key ' + srcKey);
return;
};
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
console.log('skipping non-image ' + srcKey);
return;
};
// Actually call resizeImage, the main pipeline function:
resizeImage(function(err){
// Done. Manual cleanup of temporary directory
tmpobj.removeCallback();
})
function resizeImage (callback) {
var s3obj = {
Bucket: srcBucket,
Key: srcKey
};
download(s3obj, function(response){
var gmConfigs = _sizesArray.map(function(size, i){
return {
width: size.width,
_Key: _filePath (tmpobj, i)
};
});
async.eachSeries(gmConfigs,
function(config, done){
transform(response, config.width, config._Key, done)
},
function(err){
if(err){
console.log(err);
} else {
upLoad();
// Further work is required to identify if all the uploads worked,
// and to know when to call callback() here
// callback();
}
})
})
}
function download (s3obj, callback) {
console.log("started!");
s3.getObject(s3obj, function (err, response) {
if (err) {
console.error(err);
}
// call transform if successful
callback(response);
});
};
function transform (response, width, _Key, callback) {
// resize images
gm(response.Body, srcKey)
.resize(width)
.write(_Key, function (err) {
if (err) {
console.error(err);
}
callback();
});
};
function upLoad () {
for ( var i = 0; i<len; i++ ) {
var readPath = _filePath (tmpobj, i);
var writePath = _filePath (i);
// read file from temp directory
fs.readFile(readPath, function (err, data) {
if (err) {
console.error(err);
}
// upload images to s3 bucket
s3.putObject({
Bucket: srcBucket,
Key: writePath,
Body: data,
ContentType: data.type
},
function (err) {
if (err) {
console.error(err);
}
console.log("Uploaded with success!");
});
})
}
};
};
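A rough sketch of that upLoad() rewrite with async.each (untested; it reuses the _filePath helper from above, passing null as the first argument so it returns the dst/ key, and takes a callback so resizeImage can know when every upload has finished):
function upLoad (callback) {
    // build the read/write path pairs up front, then run the uploads in parallel
    var jobs = _sizesArray.map(function (size, i) {
        return { readPath: _filePath(tmpobj, i), writePath: _filePath(null, i) };
    });
    async.each(jobs, function (job, done) {
        fs.readFile(job.readPath, function (err, data) {
            if (err) { return done(err); }
            s3.putObject({ Bucket: srcBucket, Key: job.writePath, Body: data }, done);
        });
    }, callback); // called once, with an error or after all four uploads complete
}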

Grunt trigger nodejs module and pass in parameter

I want to write a Grunt file so that when a file is added to an image folder, Grunt will trigger the following Node.js image resize module, GruntHandler, passing in the path to the newly added file.
Has anyone had any experience with this?
I am somewhat lost here as how to set it all up and write the grunt file to do this.
This is the code I want to trigger.
// dependencies
var async = require('async');
var gm = require('gm').subClass({ imageMagick: true });
var util = require('util');
var fs = require("fs");
var _800px = {
width: 800,
destinationPath: "large"
};
var _500px = {
width: 500,
destinationPath: "medium"
};
var _200px = {
width: 200,
destinationPath: "small"
};
var _45px = {
width: 45,
destinationPath: "thumbnail"
};
var _sizesArray = [_800px, _500px, _200px, _45px];
var len = _sizesArray.length;
// handler for dev environment
exports.GruntHandler = function (event, context) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcFile = event; // file being sent by grunt ---> string url to file
var dstnFile = "/dst";
// Infer the image type.
var typeMatch = srcFile.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error('unable to infer image type for key ' + srcFile);
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
console.log('skipping non-image ' + srcFile);
return;
}
// Download the image from S3, transform, and upload to same S3 bucket but different folders.
async.waterfall([
function download(next) {
// Read the image from local file and pass into transform.
fs.readFile(srcFile, function (err, data) {
if (err) {
next(err);
}
next(data);
});
},
function transform(response, next) {
for (var i = 0; i<len; i++) {
// Transform the image buffer in memory.
gm(response.Body, srcFile)
.resize(_sizesArray[i].width)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
}
},
function upload(contentType, data, next) {
for (var i = 0; i<len; i++) {
// Stream the transformed image to a different folder.
fs.writeFile(dstnFile + "/" + _sizesArray[i].destinationPath + "/" + fileName, function (err, written, buffer) {
if (err) {
next(err);
}
});
}
}
], function (err) {
if (err) {
console.error(
'---->Unable to resize ' + srcFile +
' and upload to ' + dstnFile +
' due to an error: ' + err
);
} else {
console.log(
'---->Successfully resized ' + srcFile +
' and uploaded to ' + dstnFile
);
}
context.done();
}
);
console.log(" grunt handler called!");
};
You can use grunt-contrib-watch for this. The watch event should get called when a new file is added (if watch doesn't work, you might be running into this: https://github.com/gruntjs/grunt-contrib-watch/issues/166).
Call your function in the watch event handler as follows.
Use the relative path of your file in place of ./GruntHandler.js; if the file is in the same directory, you can require it like this:
var GruntHandler = require("./GruntHandler.js").GruntHandler;
grunt.initConfig({
watch: {
scripts: {
files: ['images/*.*'],
},
},
});
grunt.event.on('watch', function(action, filepath, target) {
GruntHandler(filepath);
});
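Putting it together, a complete Gruntfile.js would look roughly like this (a sketch; it assumes GruntHandler.js sits next to the Gruntfile and that the images live under images/):
var GruntHandler = require("./GruntHandler.js").GruntHandler;

module.exports = function (grunt) {
    grunt.initConfig({
        watch: {
            scripts: {
                files: ['images/*.*'],
            },
        },
    });

    // load the watch task and forward every added/changed file path to the handler
    grunt.loadNpmTasks('grunt-contrib-watch');
    grunt.event.on('watch', function (action, filepath, target) {
        GruntHandler(filepath);
    });
};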

Nodejs image resizer with graphicmagick

I have the following Node.js code which, as it is, gets an image from AWS, resizes it into 4 different sizes and then saves it back into the AWS bucket in separate folders. However, I need to write it so that it can be run in the dev environment as well. How could I write it so that, depending on the input (a local file on a Vagrant machine, or a file on the AWS server), different functions are called (what should it listen to?)? It is worth noting that I am using AWS's new Lambda service.
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true });
var util = require('util');
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var _800px = {
width: 800,
dstnKey: srcKey,
destinationPath: "large"
};
var _500px = {
width: 500,
dstnKey: srcKey,
destinationPath: "medium"
};
var _200px = {
width: 200,
dstnKey: srcKey,
destinationPath: "small"
};
var _45px = {
width: 45,
dstnKey: srcKey,
destinationPath: "thumbnail"
};
var _sizesArray = [_800px, _500px, _200px, _45px];
var len = _sizesArray.length;
console.log(len);
console.log(srcBucket);
console.log(srcKey);
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error('unable to infer image type for key ' + srcKey);
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
console.log('skipping non-image ' + srcKey);
return;
}
// Download the image from S3, transform, and upload to same S3 bucket but different folders.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function transform(response, next) {
for (var i = 0; i<len; i++) {
// Transform the image buffer in memory.
gm(response.Body, srcKey)
.resize(_sizesArray[i].width)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
}
},
function upload(contentType, data, next) {
for (var i = 0; i<len; i++) {
// Stream the transformed image to a different folder.
s3.putObject({
Bucket: srcBucket,
Key: "dst/" + _sizesArray[i].destinationPath + "/" + _sizesArray[i].dstnKey,
Body: data,
ContentType: contentType
},
next);
}
}
], function (err) {
if (err) {
console.error(
'---->Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + srcBucket + '/dst' +
' due to an error: ' + err
);
} else {
console.log(
'---->Successfully resized ' + srcBucket +
' and uploaded to' + srcBucket + "/dst"
);
}
context.done();
}
);
};
I would go for creating two providers (modules), i.e. fsProvider and awsProvider, each with download, transform and upload methods. Then in the handler I would decide which provider to use depending on process.env.NODE_ENV (development or production).
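A rough sketch of that idea (the module names and callback shapes here are hypothetical, not an existing API):
// fsProvider.js and awsProvider.js would each export the same three methods
var awsProvider = require('./awsProvider'); // wraps s3.getObject / s3.putObject
var fsProvider = require('./fsProvider');   // wraps fs.readFile / fs.writeFile

var provider = process.env.NODE_ENV === 'production' ? awsProvider : fsProvider;

exports.handler = function (event, context) {
    provider.download(event, function (err, image) {
        if (err) { return context.done(err); }
        provider.transform(image, function (err, resized) {
            if (err) { return context.done(err); }
            provider.upload(resized, context.done);
        });
    });
};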
