Query S3 JSON file in AWS - Node.js

I have a JSON file uploaded to S3, and I wrote the following code to query this file:
const aws = require('aws-sdk');
const s3 = new aws.S3();
const bucket = 'hotels.mserver.online';
const objectKey = 'hotelsrates.json';
exports.handler = (event,context,callback) => {
// TODO implement
const response = getS3Objects(bucket,objectKey); //s3.listObjectsV2({}).promise();
console.log(response);
};
function getS3Objects(bucket, key) {
  return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
    .promise()
    .then(file => { return file })
    .catch(error => { return error });
}
But the result I get is null.

I understand what you are trying to accomplish here but that is not the right way to do it.
function getS3Objects(bucket, key) {
  return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
    .promise()
    .then(file => { return file })
    .catch(error => { return error });
}
The part above will still return a promise object, which means that you need to handle it accordingly. Instead of const response = getS3Objects(bucket,objectKey); you want to do
getS3Objects(bucket,objectKey).then(response => console.log(response));
Inside of your handler function.
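For illustration, a minimal sketch of the handler with that change, reusing the bucket and objectKey constants from the question:
exports.handler = (event, context, callback) => {
  // wait for the promise to settle before using its value
  getS3Objects(bucket, objectKey)
    .then(response => {
      console.log(response);
      callback(null, response);
    })
    .catch(err => callback(err));
};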
Furthermore, note that s3.getObject can also be called in callback style, where the first argument is the parameters object and the second argument is a callback function:
s3.getObject(params, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data);               // successful response
});
So if you want to keep working with promises but build on the callback style yourself, you can modify your getS3Objects function like this:
function getS3Objects(bucket, key) {
return new Promise((resolve, reject) => {
s3.getObject(
{
Bucket: bucket,
Key: key,
ResponseContentType: 'application/json'
},
(err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
}
);
});
}

Another way that you can do this is as follows:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
async function readFile(Bucket, Key) {
const params = {
Bucket,
Key,
ResponseContentType: 'application/json',
};
const f = await s3.getObject(params).promise();
return f.Body.toString('utf-8');
}
readFile('mybucket', 'xyz.json').then(console.log);
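Note that getObject hands back the raw body, which readFile above returns as a string; if you need the actual JSON contents you still have to parse it yourself, for example:
readFile('mybucket', 'xyz.json').then(text => {
  const data = JSON.parse(text); // turn the raw string into a JavaScript object
  console.log(data);
});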

Related

JIMP Issues with saving within /tmp in AWS Lambda function

I have written a Lambda function which opens an image and modifies it, before saving it to the /tmp folder and uploading it to an S3 bucket. This works locally, but when I run it in Lambda I get an error stating no such file or directory, open '/tmp/nft.png'. What could be causing this? I originally thought that it was an issue with the write function not being awaited, but I don't think this can be the case since it works fine locally.
var Jimp = require("jimp")
var fs = require('fs')
var path = require("path")
var AWS = require('aws-sdk')
AWS.config.update({
accessKeyId: <removed>,
secretAccessKey: <removed>
})
var s3 = new AWS.S3()
async function updateImage() {
var img = await Jimp.read("base_img.png")
var font = await Jimp.loadFont("fonts/Audiowide.fnt")
img.print(<removed for simplicity>)
return img.write("/tmp/nft.png")
}
function uploadFile(id) {
return new Promise((resolve, reject) => {
fs.readFile("/tmp/nft.png", function (err, data) {
if (err) { throw err; }
params = {<removed>};
s3.putObject(params, function(err, data) {
if (err) {
console.log(err)
reject(err)
} else {
console.log("Successfully uploaded data");
resolve()
}
});
});
})
}
exports.handler = async (event) => {
await updateImage()
await uploadFile(event.queryStringParameters.id)
return {
statusCode: 200,
body: JSON.stringify("Generated image")
}
}
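One thing worth checking here (an observation, not part of the original thread): Jimp's img.write takes a callback and returns immediately, so the promise returned by updateImage can resolve before /tmp/nft.png has actually been written. Locally the process keeps running long enough for the write to complete, but in Lambda the handler moves straight on to uploadFile. A minimal sketch of the awaited variant, assuming a Jimp version that provides the promise-based writeAsync:
async function updateImage() {
  var img = await Jimp.read("base_img.png")
  var font = await Jimp.loadFont("fonts/Audiowide.fnt")
  img.print(<removed for simplicity>)
  // writeAsync returns a promise, so awaiting updateImage() now also waits
  // for the file to exist in /tmp before the upload starts
  return img.writeAsync("/tmp/nft.png")
}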

Lambda: Unable to stream and upload data in S3

Using the below code, I'm trying to download a file from one S3 bucket and upload it to another S3 bucket programmatically. The code executes without any issues or exceptions, but the file is not getting processed.
const AWS = AWSXRay.captureAWS(require('aws-sdk'))
const S3 = new AWS.S3()
const fs = require('fs')
exports.index = async (event, context) => {
var getParams = {
Bucket: 'my-s3-test-bucket1',
Key: 'SampleVideo.mp4'
}
const inputFilename = '/tmp/SampleVideo.mp4';
const writeStream = fs.createWriteStream(inputFilename)
new Promise((resolve, reject) => {
  S3.getObject(getParams).createReadStream()
    .pipe(writeStream)
    .on('end', () => { console.log('end'); return resolve(); })
    .on('error', (error) => { console.log('error'); return reject(error); });
});
writeStream.on('finish', function () {
var putParams = {
Body: fs.createReadStream(inputFilename),
Bucket: 'my-s3-test-bucket2',
Key: 'transfer-' + 'OutputVideo.mp4',
}
S3.upload(putParams, function (err, data) {
if (err) console.log(err, err.stack)
else console.log('logging data' + data)
})
})
}
It seems that because you are using an async handler, your function completes prematurely, before its body has a chance to fully execute.
You can wrap your code in a promise, as shown in the AWS docs, and resolve it once the upload finishes, to actually tell your function to wait for its entire body to execute:
const AWS = AWSXRay.captureAWS(require('aws-sdk'))
const S3 = new AWS.S3()
const fs = require('fs')
exports.index = async (event, context) => {
const promise = new Promise(function(resolve, reject) {
var getParams = {
Bucket: 'my-s3-test-bucket1',
Key: 'SampleVideo.mp4'
}
const inputFilename = '/tmp/SampleVideo.mp4';
const writeStream = fs.createWriteStream(inputFilename)
new Promise((resolve, reject) => {
  S3.getObject(getParams).createReadStream()
    .pipe(writeStream)
    .on('end', () => { console.log('end'); return resolve(); })
    .on('error', (error) => { console.log('error'); return reject(error); });
});
writeStream.on('finish', function () {
var putParams = {
Body: fs.createReadStream(inputFilename),
Bucket: 'my-s3-test-bucket2',
Key: 'transfer-' + 'OutputVideo.mp4',
}
S3.upload(putParams, function (err, data) {
  if (err) {
    console.log(err, err.stack)
    return reject(err)
  }
  console.log('logging data ' + data)
  resolve(data)
})
})
})
return promise
}
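As a side note (not part of the original answer), if buffering the whole object in memory is acceptable, the temp file and streams can be skipped entirely by awaiting the SDK's promise-returning calls directly. A minimal sketch under that assumption, reusing the S3 client defined above:
exports.index = async (event, context) => {
  // read the whole object into memory, then re-upload it to the target bucket
  const object = await S3.getObject({ Bucket: 'my-s3-test-bucket1', Key: 'SampleVideo.mp4' }).promise()
  const result = await S3.upload({
    Bucket: 'my-s3-test-bucket2',
    Key: 'transfer-' + 'OutputVideo.mp4',
    Body: object.Body
  }).promise()
  console.log('uploaded to ' + result.Location)
}
For large videos the streaming approach above is the safer choice, since this version holds the entire file in the Lambda's memory.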

Node: Wait for the Python script to write the file, then upload it to S3

I have written the following code, where I create a file with a Python script, then upload it to S3, and then give the user the ability to download it.
exports.createFeature = async (req, res, next) => {
let retourUrl = await uploadFile(req.body)
res.status(201).json(retourUrl)
};
function uploadFile(feature) {
return new Promise(async (resolve, reject) => {
let options = {
scriptPath: 'pathToDcript',
args: [arg1, arg2, arg3]
};
PythonShell.run('script.py', options, function (err) {
if (err) throw err;
console.log('file has been created !');
//read the file
let contents = fs.readFileSync('pathToFile', {encoding:'utf8', flag:'r'});
//convert it to buffer
const fileContent = Buffer.from(contents, "utf-8");
// Setting up S3 upload parameters
let key = keyUserData+feature.userId+'/fileName'
const params = {
Bucket: bucket,
Key: key, // File name you want to save as in S3
Body: fileContent
};
// Uploading files to the bucket
s3.upload(params, function(err, data) {
if (err) {
throw err;
}
//console.log(`File uploaded successfully. ${data.Location}`);
});
// delete the file
fs.unlinkSync('pathToFile');
//get url for download
const presignedURL = s3.getSignedUrl('getObject', {
Bucket: bucket,
Key: key,
Expires: 60*5
})
resolve(presignedURL)
})
});
}
But I get the download URL before the file has been uploaded to S3. Any idea how I can make it wait until everything finishes?
If you want to use s3.upload with a callback, you need to change your code as shown below.
exports.createFeature = async (req, res, next) => {
let retourUrl = await uploadFile(req.body)
res.status(201).json(retourUrl)
};
function uploadFile(feature) {
return new Promise((resolve, reject) => {
let options = {
scriptPath: 'pathToDcript',
args: [arg1, arg2, arg3]
};
PythonShell.run('script.py', options, function (err) {
if (err) return reject(err);
console.log('file has been created !');
//read the file
let contents = fs.readFileSync('pathToFile', { encoding: 'utf8', flag: 'r' });
//convert it to buffer
const fileContent = Buffer.from(contents, "utf-8");
// Setting up S3 upload parameters
let key = keyUserData + feature.userId + '/fileName'
const params = {
Bucket: bucket,
Key: key, // File name you want to save as in S3
Body: fileContent
};
// Uploading files to the bucket
s3.upload(params, function (err, data) {
if (err) {
  return reject(err);
}
// delete the file
fs.unlinkSync('pathToFile');
//get url for download
const presignedURL = s3.getSignedUrl('getObject', {
Bucket: bucket,
Key: key,
Expires: 60 * 5
})
//console.log(`File uploaded successfully. ${data.Location}`);
resolve(presignedURL)
});
})
});
}
The S3 upload method of the AWS SDK does not return a Promise directly; calling .promise() on the returned managed upload gives you a Promise that can be awaited.
For example:
await s3.upload(...).promise()
Note that in this case the callback function to the Python script should be changed to an async function, in order to allow the await syntax. For example:
PythonShell.run('script.py', options, async function (err)
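Put together, a rough sketch of that variant: the PythonShell.run call inside uploadFile's Promise (same names as in the question) becomes an async callback that awaits the upload before resolving with the presigned URL:
PythonShell.run('script.py', options, async function (err) {
  if (err) return reject(err);
  console.log('file has been created!');
  const contents = fs.readFileSync('pathToFile', { encoding: 'utf8', flag: 'r' });
  const params = {
    Bucket: bucket,
    Key: keyUserData + feature.userId + '/fileName',
    Body: Buffer.from(contents, 'utf-8')
  };
  // wait for the upload to finish before creating the download URL
  await s3.upload(params).promise();
  fs.unlinkSync('pathToFile');
  const presignedURL = s3.getSignedUrl('getObject', {
    Bucket: bucket,
    Key: params.Key,
    Expires: 60 * 5
  });
  resolve(presignedURL);
});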

Upload multiple images with Node to S3: map problem

I'm struggling to find a solution for uploading two files to S3. I can upload one file with multer, and I have learnt how to do it, but when I try to map over all the files in the form data and upload each one, I push each location URL into an array, which is what I save in my database. Then I try to print each URL, and to my surprise they are printed inside the if statement but not when I save them to the database outside the if. Could it be an asynchronous problem?
Thanks.
tournamentsCtrl.createTournament = async (req, res) => {
var files_upload = []
if (req.files) {
aws.config.setPromisesDependency();
aws.config.update({
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
//region: process.env.REGION
});
const s3 = new aws.S3();
req.files.map((item) => {
var params = {
ACL: 'public-read',
Bucket: process.env.AWS_BUCKET_NAME,
Body: fs.createReadStream(item.path),
Key: `tournament_img/${uuidv4()/* +req.file.originalname */}`
};
await s3.upload(params, (err, data) => {
if (err) {
console.log('Error occured while trying to upload to S3 bucket', err);
}
if (data) {
fs.unlinkSync(item.path); // Empty temp folder
const locationUrl = data.Location;
files_upload.push(locationUrl);
console.log(files_upload)
}
});
});
}
console.log(files_upload)
const new_data = { ...JSON.parse(req.body.values), img_source: files_upload[0], info_url: files_upload[1] }
console.log(new_data)
const newUser = new Tournaments(new_data);
newUser
.save()
.then(user => {
res.json({ message: 'User created successfully', user });
})
.catch(err => {
console.log('Error occured while trying to save to DB');
});
};
If you look at the docs for upload, it does not return a promise, so you should not call await on it. The default map method is not compatible with async code in this form. You need to either use async.map or wrap the async code in a promise, like:
return await new Promise((resolve, reject) => {
  ...
  if (data) {
    fs.unlinkSync(item.path);
    resolve(data.Location);
  }
});
Your other code has some issues as well. A map callback should return a value; if you don't want to return anything, you should use forEach.
This is a bad place to ask for code advice, but something like the following should work:
function uploadFile(s3, element) {
return new Promise((resolve, reject) => {
let folder;
if (element.fieldname.includes('img')) {
folder = 'club_images'
} else if (element.fieldname.includes('poster')) {
folder = 'poster_tournament'
} else {
folder = 'info_tournament'
}
const params = {
ACL: 'public-read',
Bucket: process.env.AWS_BUCKET_NAME,
Body: fs.createReadStream(element.path),
Key: `${folder + '/' + uuidv4() + element.fieldname}`
};
s3.upload(params, (err, data) => {
if (err) {
return reject(err);
}
if (data) {
return fs.unlink(element.path, err=> {
if(err) {
console.error("Failed to unlink file", element.path);
}
return resolve({[element.fieldname]: data.Location});
}); // Empty temp folder
}
return resolve();
});
})
}
tournamentsCtrl.createTournament = async (req, res) => {
aws.config.setPromisesDependency();
aws.config.update({
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
//region: process.env.REGION
});
const s3 = new aws.S3();
let returnData;
try {
  const uploadData = await Promise.all(req.files.map(element => uploadFile(s3, element)));
  returnData = Object.assign({}, ...uploadData);
console.log(Object.assign(JSON.parse(req.body.values), returnData));
} catch(e) {
console.error('Failed to upload file', e);
return res.sendStatus(500);
}
const newUser = new Tournaments(Object.assign(JSON.parse(req.body.values), returnData));
console.log(newUser)
try {
const user = await newUser.save()
res.json({message: 'User created successfully', user});
} catch(e) {
console.error('Error occurred while trying to save to DB');
return res.sendStatus(500);
}
};

errorMessage: "event is not defined" in a Node.js Lambda function

I am trying to run a lambda function attached to an API gateway GET request and below is the code
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const bucketName = "dhaval-upload";
let params = {
Bucket: bucketName,
Key: event.fileName
};
exports.handler = async (event, context, callback) => {
return await s3.getObject(params).promise()
.then((res) => {
return "abcd";
// return res.Body.toString('utf-8');
})
.catch((err) => {
return err;
});
};
but I am getting the below error
errorMessage: "event is not defined"
errorType: "ReferenceError"
But I don't understand the reason for this, as I have another POST request running perfectly.
Any help will be highly appreciated
You need to place params inside your handler, like this:
exports.handler = async (event, context, callback) => {
let params = {
Bucket: bucketName,
Key: event.fileName
};
return await s3.getObject(params).promise()
.then((res) => {
return "abcd";
// return res.Body.toString('utf-8');
})
.catch((err) => {
return err;
});
};
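One more thing to double-check (not raised in the answer above): with an API Gateway Lambda proxy integration, a GET request's query string values come through on event.queryStringParameters rather than directly on event, so the key may need to be read as, for example:
let params = {
  Bucket: bucketName,
  // hypothetical: assumes the file name is passed as ?fileName=... on the GET request
  Key: event.queryStringParameters.fileName
};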
