AWS S3 putObject callback not firing - node.js

I have a Lambda function that tries to place an MP3 file into an S3 bucket; however, I don't see my file uploaded and, more strangely, I don't see any logging/response from the callback.
My Lambda and S3 bucket are on the same AWS account, and the bucket name is definitely correct.
Is there something I'm missing here? Or any explanation for why my callback is not being fired?
exports.handler = async (event, context, callback) => {
  // prior setup
  console.log('about to putObject on s3');

  const s3BucketData = {
    Bucket: 'media-files',
    Key: fileName,
    Body: fileDataBuffer,
    ContentType: 'audio/mp3'
  };

  await s3.putObject(s3BucketData, (err, data) => {
    console.log('putObject callback executing');
    if (err) {
      console.log('err occurred storing to s3: ', err);
    } else {
      console.log(`${fileName} successfully uploaded`);
    }
    context.done();
  });
};

First of all, it's bad practice to stick your methods inside the handler function. Second, there is an issue with how you're using the runtime: you chose Node 8.10, which supports async/await, but you're still trying to use callbacks.
I have a few comments for you that I hope will help.
1) You can simply do this:
export async function handler(event) {
  // body of your function
}
2) The AWS SDK methods are promisified. You have to rewrite your S3 call. Take a look at the following snippet. And one question: are you sure you need the putObject method rather than upload?
try {
  let s3 = new AWS.S3({ region: process.env.AWS_REGION, apiVersion: '2006-03-01' });
  let params = {
    Bucket: /* aws s3 bucket name (a full path) */,
    Key: /* file name/key */,
    Body: /* entity to upload */,
    ACL: 'public-read', // access policy
    ContentType: 'audio/mp3'
  };

  let s3Response = await s3.upload(params).promise();
  // request succeeded
  console.log(`File uploaded to S3 at ${s3Response.Bucket} bucket. File location: ${s3Response.Location}`);
  return s3Response.Location;
} catch (ex) {
  // request failed
  console.error(ex);
}
If you want to stick with callbacks, then:
s3.upload(params, (err, data) => {
  console.log('putObject callback executing');
  if (err) {
    console.error('err occurred storing to s3: ', err);
    return;
  }
  console.log(`${fileName} successfully uploaded`);
  return data;
});
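Note that inside an async handler the function can finish before a bare callback ever fires, so if you do keep callbacks you would typically wrap the call in a Promise and await it. A minimal sketch, assuming the same params and fileName as above:
// Sketch: wrap the callback-style upload in a Promise so the async handler waits for it.
// Assumes `params` and `fileName` are defined as above.
const data = await new Promise((resolve, reject) => {
  s3.upload(params, (err, result) => {
    if (err) {
      console.error('err occurred storing to s3: ', err);
      return reject(err);
    }
    console.log(`${fileName} successfully uploaded`);
    resolve(result);
  });
});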
I hope you'll find this helpful. Cheers!

The example below shows how to convert putObject to a promise:
exports.handler = (event, context, callback) => {
  console.log('about to putObject on s3');

  const s3BucketData = {
    Bucket: 'media-files',
    Key: fileName,
    Body: fileDataBuffer,
    ContentType: 'audio/mp3'
  };

  S3.putObject(s3BucketData).promise()
    .then(data => {
      console.log('complete:PUT Object', data);
      callback(null, data);
    })
    .catch(err => {
      console.log('failure:PUT Object', err);
      callback(err);
    });
};
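Since the original handler was declared async, the same promise can also simply be awaited instead of going through the callback parameter. A minimal sketch; fileName and fileDataBuffer come from the asker's prior setup:
// Sketch: async handler awaiting the promise directly, no callback parameter needed.
// Assumes fileName and fileDataBuffer are defined in the prior setup.
exports.handler = async (event) => {
  const s3BucketData = {
    Bucket: 'media-files',
    Key: fileName,
    Body: fileDataBuffer,
    ContentType: 'audio/mp3'
  };
  const data = await S3.putObject(s3BucketData).promise();
  console.log('complete:PUT Object', data);
  return data;
};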

Related

Cannot get data.Location while uploading to AWS s3 bucket

I successfully upload my files to the AWS S3 bucket, but I cannot get the file's location back to store in my DB.
Here is my function:
const uploadFile = (filename, key) => {
  return new Promise((resolve, reject) => {
    fs.readFile(filename, (err, data) => {
      if (err) {
        reject(err);
      }
      const params = {
        Bucket: "BUCKET_NAME",
        Key: `student_${key}`, // File name you want to save as in S3
        Body: data,
        ACL: 'public-read'
      };
      s3.upload(params, function (err, data) {
        if (err) {
          throw err;
        }
        resolve(data.Location);
      });
    });
  });
};
My router:
uploadFile.uploadFile(request.file.path, request.file.originalname).then((addr) => {
  student_photo = addr;
});
Eventually I get an empty string (when I console.log it).
The solution I found was to wrap uploadFile in a Promise, which makes it "thenable". Then, in the .then() part, I make the query request to store the info in my SQL database, as sketched below.
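A minimal sketch of that pattern, doing the DB write inside .then() once the upload has resolved with the S3 location (storeStudentPhoto, request.body.studentId, and the response object are hypothetical stand-ins for the actual SQL query and router plumbing, not from the original code):
// Sketch: consume the promise and store the S3 location in the DB inside .then().
// `storeStudentPhoto` is a hypothetical helper standing in for the SQL query.
uploadFile.uploadFile(request.file.path, request.file.originalname)
  .then(addr => storeStudentPhoto(request.body.studentId, addr))
  .then(() => response.json({ ok: true }))
  .catch(err => response.status(500).json(err.message));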

S3 callbacks get ignored

I'm trying to upload a base64 encoded image to S3 through this route, but the callbacks get completely ignored and the code jumps straight to res.json("SUCCESS");
route
AWS.config.update({
  accessKeyId: "xxxxxxxxxxxxxx",
  secretAccessKey: "xxxxxxxxxxxxxxxxxxxxxx",
  region: "us-east-1"
});
const s3 = new AWS.S3();
....
router.post("/imageupload", async (req, res) => {
  const base64 = req.body.base64;
  try {
    const params = {
      Bucket: process.env.bucketName,
      Key: "images/newImage",
      Body: base64
    };
    await s3.putObject(params, function (err, data) {
      if (err) res.json(err);
      else res.json(data);
    });
    res.json("SUCCESS");
  } catch (e) {
    console.log(e.message);
    res.status(500).json(e.message);
  }
});
Any help is much appreciated thanks!
EDIT FIXED:
I figured out what the problem was:
I had recently reformatted my computer, which meant I had to reinstall the AWS CLI AND reconfigure my AWS credentials.
That was it.
See the AWS documentation on using promises:
var s3 = new AWS.S3({ apiVersion: '2006-03-01', region: 'us-west-2' });

var params = {
  Bucket: 'bucket',
  Key: 'example2.txt',
  Body: 'Uploaded text using the promise-based method!'
};

var putObjectPromise = s3.putObject(params).promise();
putObjectPromise.then(function (data) {
  console.log('Success');
}).catch(function (err) {
  console.log(err);
});
You can also promisify all functions by using a library such as bluebird
AWS.config.setPromisesDependency(require('bluebird'));
Here's an example using your code
router.post("/imageupload", async (req, res) => {
const base64 = req.body.base64;
try {
const params = {
Bucket: process.env.bucketName,
Key: "images/newImage",
Body: base64
};
const data = await s3.putObject(params).promise()
res.json(data);
} catch (e) {
console.log(e.message);
res.status(500).json(e.message);
}
});

AWS S3 Image uploaded is corrupted

When uploading images, if I use the data that's in my req.file.buffer (an array of numbers, i.e. the buffer), the image uploads to AWS S3 correctly.
But I need to resize the image first, so I'm trying to use jimp, like so:
const photo = await jimp.read(req.file.buffer)
await photo.cover(300, 300);
And then pass it to the AWS params:
const s3 = new AWS.S3();
const params = {
  Bucket: 'jamsession-images',
  Key: req.body.photo,
  // here in body is a buffer just like the one in req.file.buffer
  Body: photo.bitmap.data
};
s3.upload(params, function (err, data) {
  if (err) {
    console.log(err);
  }
  console.log('****************** success');
});
But if I do this, the image does get uploaded to AWS S3, but it is corrupted.
What am I doing wrong here? I think AWS S3 needs a buffer in the body, and I thought that after jimp finished scaling the image the new buffer would work, but it doesn't. Any ideas?
Full code:
exports.resize = async (req, res, next) => {
  // check if there is no new file to resize
  if (!req.file) {
    next(); // skip to the next middleware
    return;
  }

  const extension = req.file.mimetype.split('/')[1];
  req.body.photo = `${uuid.v4()}.${extension}`;

  // now we resize
  const photo = await jimp.read(req.file.buffer);
  await photo.cover(300, 300);

  AWS.config.update({
    secretAccessKey: process.env.SECRETACCESSKEY,
    accessKeyId: process.env.ACCESSKEYID,
    region: 'us-east-1'
  });

  const s3 = new AWS.S3();
  const params = {
    Bucket: 'jamsession-images',
    Key: req.body.photo,
    // this line seems to be the issue..
    // even though photo.bitmap.data is also a buffer
    Body: photo.bitmap.data
  };

  s3.upload(params, function (err, data) {
    if (err) {
      console.log('%%%%%%%%%%%%%%% error in callback');
      console.log(err);
    }
    console.log('****************** success');
    console.log(data);
  });

  // await photo.write(`./public/uploads/${req.body.photo}`);
  // once we have written the photo to our filesystem, keep going!
  next();
};
I have had this problem too. To get the correct buffer of the resulting image, we have to use Jimp's getBuffer function.
image.getBuffer(mime, cb);
Supported MIME types
Jimp.MIME_PNG; // "image/png"
Jimp.MIME_JPEG; // "image/jpeg"
Jimp.MIME_BMP; // "image/bmp"
But with Jimp.AUTO you can get the MIME type of the original image and use it.
You can read more about the getBuffer function at https://www.npmjs.com/package/jimp
photo.getBuffer(Jimp.AUTO, function (error, result) {
  const params = {
    Bucket: 'jamsession-images',
    Key: req.body.photo,
    // correct buffer
    Body: result
  };
  s3.upload(...);
});
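As a side note, since the resize middleware is already async, more recent versions of jimp also expose a promise-based getBufferAsync; assuming your jimp version provides it, the same fix can be written without nesting the upload inside a callback:
// Sketch, assuming a jimp version that provides getBufferAsync (promise-based).
// Jimp.AUTO keeps the original image's MIME type, as described above.
const buffer = await photo.getBufferAsync(Jimp.AUTO);
const params = {
  Bucket: 'jamsession-images',
  Key: req.body.photo,
  Body: buffer // the correctly encoded image buffer, not photo.bitmap.data
};
await s3.upload(params).promise();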

Query S3 json file in AWS

I have a JSON file uploaded to S3.
Then I wrote the following code to query this file:
const aws = require('aws-sdk');
const s3 = new aws.S3();

const bucket = 'hotels.mserver.online';
const objectKey = 'hotelsrates.json';

exports.handler = (event, context, callback) => {
  // TODO implement
  const response = getS3Objects(bucket, objectKey); // s3.listObjectsV2({}).promise();
  console.log(response);
};

function getS3Objects(bucket, key) {
  return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
    .promise().then(file => { return file })
    .catch(error => { return error });
}
But the result I'm getting is null.
I understand what you are trying to accomplish here but that is not the right way to do it.
function getS3Objects(bucket, key) {
  return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
    .promise().then(file => { return file })
    .catch(error => { return error });
}
The part above will still return a promise object, which means you need to handle it accordingly. Instead of const response = getS3Objects(bucket, objectKey); you want to do the following inside your handler function:
getS3Objects(bucket, objectKey).then(response => console.log(response));
Furthermore, your usage of the s3.getObject function is incorrect: the first argument is a parameters object, and the second argument is a callback function.
s3.getObject(params, function (err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data);
});
Therefore, in your case, you want to modify your getS3Objects function a bit. If you want to use promises, you can do it like this:
function getS3Objects(bucket, key) {
  return new Promise((resolve, reject) => {
    s3.getObject(
      {
        Bucket: bucket,
        Key: key,
        ResponseContentType: 'application/json'
      },
      (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      }
    );
  });
}
Another way that you can do this is as follows:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

async function readFile(Bucket, Key) {
  const params = {
    Bucket,
    Key,
    ResponseContentType: 'application/json',
  };
  const f = await s3.getObject(params).promise();
  return f.Body.toString('utf-8');
}

readFile('mybucket', 'xyz.json').then(console.log);
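Since the object is JSON, the string returned by readFile can then be parsed before you query it. A small usage sketch using the bucket and key from the question (the shape of the parsed data depends on your file):
// Sketch: parse the JSON body returned by readFile before querying it.
readFile('hotels.mserver.online', 'hotelsrates.json')
  .then(body => {
    const rates = JSON.parse(body);
    console.log(rates); // filter/query the parsed object as needed
  })
  .catch(console.error);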

How can I return data from a callback function in Node.js

I am trying to create a function that returns either the error data from AWS or the { ETag: '"74..."' } data response from the callback. This code currently writes my buffer file to the S3 bucket, but I want to return my ETag number or the error data back from the function, and I keep getting undefined. Any help would be appreciated.
function aws(file, name) {
  var s3 = new AWS.S3();
  s3.putObject({
    Bucket: 'Bucket-Name',
    ACL: 'public-read',
    Key: name,
    Body: file
  }, function (err, data) {
    if (err) {
      console.log('Something went wrong');
      return err;
    } else {
      console.log('Successfully uploaded image');
      console.log(data);
      return data;
    }
  });
}
var response = aws(buffer, 'file.png');
Solved my problem with a Promise. Hope this helps someone else someday :)
const aws = function (file, name) {
  return new Promise((resolve, reject) => {
    let s3 = new AWS.S3();
    s3.putObject({
      Bucket: 'Bucket-Name',
      ACL: 'public-read',
      Key: name,
      Body: file
    }, function (err, data) {
      if (err) {
        console.log('Something went wrong');
        reject(err);
      } else {
        console.log('Successfully uploaded image');
        resolve(data);
      }
    });
  });
};
aws(buffer, 'file.png')
  .then(response => {
    res.set({ 'Content-Type': 'application/json' });
    res.status(200);
    res.send(response);
  })
  .catch(console.error);
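As a follow-up, because aws() now returns a promise, an async route handler can also consume it with await. A sketch, where the Express-style route and the surrounding handler are hypothetical:
// Sketch: awaiting the promise-returning aws() helper inside a hypothetical
// Express-style async route handler.
router.post('/upload', async (req, res) => {
  try {
    const data = await aws(buffer, 'file.png'); // resolves with e.g. { ETag: '"74..."' }
    res.set({ 'Content-Type': 'application/json' });
    res.status(200).send(data);
  } catch (err) {
    console.error(err);
    res.status(500).send(err.message);
  }
});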
