I have written a Lambda function which opens an image and modifies it before saving it to the /tmp folder and uploading it to an S3 bucket. This works locally, but when I run it in Lambda I get an error stating no such file or directory, open '/tmp/nft.png'. What could be causing this? I originally thought that it was an issue with the write function not being awaited, but I don't think this can be the case since it works fine locally.
var Jimp = require("jimp")
var fs = require('fs')
var path = require("path")
var AWS = require('aws-sdk')

AWS.config.update({
  accessKeyId: <removed>,
  secretAccessKey: <removed>
})

var s3 = new AWS.S3()

async function updateImage() {
  var img = await Jimp.read("base_img.png")
  var font = await Jimp.loadFont("fonts/Audiowide.fnt")
  img.print(<removed for simplicity>)
  return img.write("/tmp/nft.png")
}

function uploadFile(id) {
  return new Promise((resolve, reject) => {
    fs.readFile("/tmp/nft.png", function (err, data) {
      if (err) { throw err; }
      params = {<removed>};
      s3.putObject(params, function (err, data) {
        if (err) {
          console.log(err)
          reject(err)
        } else {
          console.log("Successfully uploaded data");
          resolve()
        }
      });
    });
  })
}

exports.handler = async (event) => {
  await updateImage()
  await uploadFile(event.queryStringParameters.id)
  return {
    statusCode: 200,
    body: JSON.stringify("Generated image")
  }
}
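For what it's worth, the asker's first suspicion is the most likely culprit: Jimp's write is callback-based and returns before the file is flushed to disk, so uploadFile can run before /tmp/nft.png exists; locally the race tends to go unnoticed because the process stays alive. A minimal sketch of a fix, assuming the Jimp version in use exposes the promise-based writeAsync:

async function updateImage() {
  var img = await Jimp.read("base_img.png")
  var font = await Jimp.loadFont("fonts/Audiowide.fnt")
  img.print(<removed for simplicity>)
  // writeAsync returns a promise, so awaiting it guarantees the file
  // is on disk in /tmp before uploadFile tries to read it
  return img.writeAsync("/tmp/nft.png")
}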
Related
Using the below code, I'm trying to download a file from one S3 bucket and upload it to another S3 bucket programmatically. The code executes without any issues or exceptions, but the file is not getting processed.
const AWSXRay = require('aws-xray-sdk')
const AWS = AWSXRay.captureAWS(require('aws-sdk'))
const S3 = new AWS.S3()
const fs = require('fs')

exports.index = async (event, context) => {
  var getParams = {
    Bucket: 'my-s3-test-bucket1',
    Key: 'SampleVideo.mp4'
  }
  const inputFilename = '/tmp/SampleVideo.mp4';
  const writeStream = fs.createWriteStream(inputFilename)

  new Promise((resolve, reject) => {
    S3.getObject(getParams).createReadStream()
      .pipe(writeStream)
      .on('end', () => { console.log('end'); return resolve(); })
      .on('error', (error) => { console.log('error'); return reject(error); })
  });

  writeStream.on('finish', function () {
    var putParams = {
      Body: fs.createReadStream(inputFilename),
      Bucket: 'my-s3-test-bucket2',
      Key: 'transfer-' + 'OutputVideo.mp4',
    }
    S3.upload(putParams, function (err, data) {
      if (err) console.log(err, err.stack)
      else console.log('logging data' + data)
    })
  })
}
It seems that because you are using an async handler, your function completes prematurely, before its body has had a chance to fully execute.
You can wrap your code in a promise, as shown in the AWS docs, to actually tell your function to wait for its entire body to execute:
const AWSXRay = require('aws-xray-sdk')
const AWS = AWSXRay.captureAWS(require('aws-sdk'))
const S3 = new AWS.S3()
const fs = require('fs')

exports.index = async (event, context) => {
  const promise = new Promise(function (resolve, reject) {
    var getParams = {
      Bucket: 'my-s3-test-bucket1',
      Key: 'SampleVideo.mp4'
    }
    const inputFilename = '/tmp/SampleVideo.mp4';
    const writeStream = fs.createWriteStream(inputFilename)

    S3.getObject(getParams).createReadStream()
      .on('error', (error) => { console.log('error'); reject(error); })
      .pipe(writeStream)

    writeStream.on('finish', function () {
      var putParams = {
        Body: fs.createReadStream(inputFilename),
        Bucket: 'my-s3-test-bucket2',
        Key: 'transfer-' + 'OutputVideo.mp4',
      }
      S3.upload(putParams, function (err, data) {
        if (err) {
          console.log(err, err.stack)
          reject(err)
        } else {
          console.log('logging data' + data)
          // Settle the wrapping promise so the async handler waits
          resolve(data)
        }
      })
    })
  })
  return promise
}
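For comparison, a sketch of the same flow with each step awaited in sequence; AWS SDK v2 request objects expose a .promise() method, so the upload needs no hand-rolled wrapper:

exports.index = async (event, context) => {
  const getParams = { Bucket: 'my-s3-test-bucket1', Key: 'SampleVideo.mp4' }
  const inputFilename = '/tmp/SampleVideo.mp4'
  // Wait until the download has been fully written to /tmp
  await new Promise((resolve, reject) => {
    S3.getObject(getParams).createReadStream()
      .on('error', reject)
      .pipe(fs.createWriteStream(inputFilename))
      .on('finish', resolve)
      .on('error', reject)
  })
  // .promise() converts the AWS.Request into an awaitable promise
  await S3.upload({
    Body: fs.createReadStream(inputFilename),
    Bucket: 'my-s3-test-bucket2',
    Key: 'transfer-' + 'OutputVideo.mp4',
  }).promise()
}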
I am trying to read data from a JSON file in a Lambda function using Node.js, but I am unable to retrieve the data and am getting a null response. My Lambda function code is as follows:
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-2'});
const s3 = new AWS.S3({apiVersion: '2006-03-01'});

exports.handler = async function(event, context, callback) {
  var bucketParams = {
    Bucket: 'sample-bucket',
    Key: 'employee.json'
  };
  s3.getObject(bucketParams, function(err, data) {
    if (err) {
      console.log("Error", err);
    } else {
      callback(null, data.Body.toString());
    }
  });
};
Response:
null
Since you are returning using the callback, you should not use async:
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-2'});
const s3 = new AWS.S3({apiVersion: '2006-03-01'});

exports.handler = function(event, context, callback) {
  var bucketParams = {
    Bucket: 'sample-bucket',
    Key: 'employee.json'
  };
  s3.getObject(bucketParams, function(err, data) {
    if (err) {
      console.log("Error", err);
    } else {
      callback(null, data.Body.toString());
    }
  });
};
Also make sure that the function has the proper permissions and enough time to access S3.
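Alternatively, if you would rather keep the async handler, a sketch that drops the callback and returns the result through the SDK's promise support:

var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-2'});
const s3 = new AWS.S3({apiVersion: '2006-03-01'});

exports.handler = async function(event) {
  var bucketParams = {
    Bucket: 'sample-bucket',
    Key: 'employee.json'
  };
  // An async handler resolves with its return value instead of
  // calling back, so awaiting the promise is enough
  const data = await s3.getObject(bucketParams).promise();
  return data.Body.toString();
};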
I'm struggling to find a solution to upload two files to S3. I can upload one file with multer, and I have learnt how to do that, but when I map over all the files in the form data and upload each one, I push each location URL into an array, which is what I save in my database. When I try to print each URL, to my surprise they print inside the if statement, but not when I save them to the database outside the if. Could it be an asynchronous problem?
Thanks.
tournamentsCtrl.createTournament = async (req, res) => {
  var files_upload = []
  if (req.files) {
    aws.config.setPromisesDependency();
    aws.config.update({
      accessKeyId: process.env.AWS_ACCESS_KEY_ID,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
      //region: process.env.REGION
    });
    const s3 = new aws.S3();
    req.files.map((item) => {
      var params = {
        ACL: 'public-read',
        Bucket: process.env.AWS_BUCKET_NAME,
        Body: fs.createReadStream(item.path),
        Key: `tournament_img/${uuidv4()/* +req.file.originalname */}`
      };
      await s3.upload(params, (err, data) => {
        if (err) {
          console.log('Error occured while trying to upload to S3 bucket', err);
        }
        if (data) {
          fs.unlinkSync(item.path); // Empty temp folder
          const locationUrl = data.Location;
          files_upload.push(locationUrl);
          console.log(files_upload)
        }
      });
    });
  }
  console.log(files_upload)
  const new_data = { ...JSON.parse(req.body.values), img_source: files_upload[0], info_url: files_upload[1] }
  console.log(new_data)
  const newUser = new Tournaments(new_data);
  newUser
    .save()
    .then(user => {
      res.json({ message: 'User created successfully', user });
    })
    .catch(err => {
      console.log('Error occured while trying to save to DB');
    });
};
If you look at the docs for upload, it does not return a promise, so you should not call await on it. The default map method is not compatible with async code in this form. You need to either use async.map or wrap the async code in a promise, like:
return await new Promise((resolve, reject) => {
  ...
  if (data) {
    fs.unlinkSync(item.path);
    resolve(data.Location);
  }
});
Your other code has some issues as well. A map function should return a value; if you don't want to return anything, you should use forEach.
This is a bad place to ask for code advice, but something like the following should work:
function uploadFile(s3, element) {
  return new Promise((resolve, reject) => {
    let folder;
    if (element.fieldname.includes('img')) {
      folder = 'club_images'
    } else if (element.fieldname.includes('poster')) {
      folder = 'poster_tournament'
    } else {
      folder = 'info_tournament'
    }
    const params = {
      ACL: 'public-read',
      Bucket: process.env.AWS_BUCKET_NAME,
      Body: fs.createReadStream(element.path),
      Key: `${folder + '/' + uuidv4() + element.fieldname}`
    };
    s3.upload(params, (err, data) => {
      if (err) {
        return reject(err);
      }
      if (data) {
        return fs.unlink(element.path, err => {
          if (err) {
            console.error("Failed to unlink file", element.path);
          }
          return resolve({ [element.fieldname]: data.Location });
        }); // Empty temp folder
      }
      return resolve();
    });
  })
}
tournamentsCtrl.createTournament = async (req, res) => {
  aws.config.setPromisesDependency();
  aws.config.update({
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    //region: process.env.REGION
  });
  const s3 = new aws.S3();
  let returnData;
  try {
    const uploadData = await Promise.all(req.files.map(element => uploadFile(s3, element)));
    returnData = Object.assign({}, ...uploadData);
    console.log(Object.assign(JSON.parse(req.body.values), returnData));
  } catch (e) {
    console.error('Failed to upload file', e);
    return res.sendStatus(500);
  }
  const newUser = new Tournaments(Object.assign(JSON.parse(req.body.values), returnData));
  console.log(newUser)
  try {
    const user = await newUser.save()
    res.json({ message: 'User created successfully', user });
  } catch (e) {
    console.error('Error occured while trying to save to DB');
    return res.sendStatus(500);
  }
};
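For completeness, a sketch of how this controller might be wired up so that req.files is populated; the route path and temp directory are assumptions, not taken from the original:

const multer = require('multer');
// Store incoming files on disk so each element has a .path for createReadStream
const upload = multer({ dest: '/tmp' });
// upload.any() accepts files under any field name, which suits the
// fieldname checks ('img', 'poster', ...) in uploadFile above
router.post('/tournaments', upload.any(), tournamentsCtrl.createTournament);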
I'm trying to upload a base64 encoded image to S3 through this route, but the callbacks get completely ignored and the code jumps straight to res.json("SUCCESS");
route
AWS.config.update({
  accessKeyId: "xxxxxxxxxxxxxx",
  secretAccessKey: "xxxxxxxxxxxxxxxxxxxxxx",
  region: "us-east-1"
});

const s3 = new AWS.S3();

....

router.post("/imageupload", async (req, res) => {
  const base64 = req.body.base64;
  try {
    const params = {
      Bucket: process.env.bucketName,
      Key: "images/newImage",
      Body: base64
    };
    await s3.putObject(params, function(err, data) {
      if (err) res.json(err);
      else res.json(data);
    });
    res.json("SUCCESS");
  } catch (e) {
    console.log(e.message);
    res.status(500).json(e.message);
  }
});
Any help is much appreciated, thanks!
EDIT FIXED:
I figured out what the problem was:
I had recently reformatted my computer, which meant I had to reinstall the AWS CLI and reconfigure my AWS credentials.
That was it.
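As an aside, hard-coded keys like the ones above are rarely needed: the SDK resolves credentials automatically through its provider chain, so the config can usually be reduced to something like this sketch (the region is just an example):

const AWS = require("aws-sdk");
// No explicit keys: the SDK's credential provider chain checks env vars,
// the shared credentials file (~/.aws/credentials), and IAM roles
const s3 = new AWS.S3({ region: "us-east-1" });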
See the AWS documentation on using promises:
var s3 = new AWS.S3({apiVersion: '2006-03-01', region: 'us-west-2'});

var params = {
  Bucket: 'bucket',
  Key: 'example2.txt',
  Body: 'Uploaded text using the promise-based method!'
};

var putObjectPromise = s3.putObject(params).promise();
putObjectPromise.then(function(data) {
  console.log('Success');
}).catch(function(err) {
  console.log(err);
});
You can also promisify all functions by using a library such as bluebird
AWS.config.setPromisesDependency(require('bluebird'));
Here's an example using your code
router.post("/imageupload", async (req, res) => {
  const base64 = req.body.base64;
  try {
    const params = {
      Bucket: process.env.bucketName,
      Key: "images/newImage",
      Body: base64
    };
    const data = await s3.putObject(params).promise()
    res.json(data);
  } catch (e) {
    console.log(e.message);
    res.status(500).json(e.message);
  }
});
I have a JSON file uploaded to S3, and I wrote the following code to query this file:
const aws = require('aws-sdk');
const s3 = new aws.S3();

const bucket = 'hotels.mserver.online';
const objectKey = 'hotelsrates.json';

exports.handler = (event, context, callback) => {
  // TODO implement
  const response = getS3Objects(bucket, objectKey); //s3.listObjectsV2({}).promise();
  console.log(response);
};

function getS3Objects(bucket, key) {
  return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
    .promise().then(file => { return file })
    .catch(error => { return error });
}
but the result is null.
I understand what you are trying to accomplish here but that is not the right way to do it.
function getS3Objects(bucket, key) {
  return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
    .promise().then(file => { return file })
    .catch(error => { return error });
}
The part above will still return a promise object, which means that you need to handle it accordingly. Instead of const response = getS3Objects(bucket,objectKey); you want to do
getS3Objects(bucket,objectKey).then(response => console.log(response));
Inside of your handler function.
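Put together, a sketch of what the handler could look like (returning the object body through the callback is an assumption about the intended output):

exports.handler = (event, context, callback) => {
  getS3Objects(bucket, objectKey)
    // Hand the body back through the Lambda callback once the promise settles
    .then(response => callback(null, response.Body.toString()))
    .catch(err => callback(err));
};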
Furthermore, your usage of the s3.getObject function is incorrect: the first argument is a parameters object, and the second argument is a callback function.
s3.getObject(params, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data);
});
Therefore in your case, you want to modify your getS3Objects function a bit. If you want to use promises, then you can do it like this.
function getS3Objects(bucket, key) {
return new Promise((resolve, reject) => {
s3.getObject(
{
Bucket: bucket,
Key: key,
ResponseContentType: 'application/json'
},
(err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
}
);
});
}
Another way that you can do this is as follows:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
async function readFile(Bucket, Key) {
const params = {
Bucket,
Key,
ResponseContentType: 'application/json',
};
const f = await s3.getObject(params).promise();
return f.Body.toString('utf-8');
}
readFile('mybucket', 'xyz.json').then(console.log);