Upload multiple images to S3 with Node.js: map problem - node.js

I'm struggling to find a solution for uploading two files to S3. I can upload one file with multer, but when I map over all the files in the form data and upload each one, I push each location URL into an array, which is what I save in my database. Then I try to print each URL, and to my surprise they print inside the if statement but not when I save them to the database outside of it. Could this be an asynchronicity problem?
Thanks.
tournamentsCtrl.createTournament = async (req, res) => {
  var files_upload = [];
  if (req.files) {
    aws.config.setPromisesDependency();
    aws.config.update({
      accessKeyId: process.env.AWS_ACCESS_KEY_ID,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
      //region: process.env.REGION
    });
    const s3 = new aws.S3();
    req.files.map((item) => {
      var params = {
        ACL: 'public-read',
        Bucket: process.env.AWS_BUCKET_NAME,
        Body: fs.createReadStream(item.path),
        Key: `tournament_img/${uuidv4()/* +req.file.originalname */}`
      };
      await s3.upload(params, (err, data) => {
        if (err) {
          console.log('Error occured while trying to upload to S3 bucket', err);
        }
        if (data) {
          fs.unlinkSync(item.path); // Empty temp folder
          const locationUrl = data.Location;
          files_upload.push(locationUrl);
          console.log(files_upload);
        }
      });
    });
  }
  console.log(files_upload);
  const new_data = { ...JSON.parse(req.body.values), img_source: files_upload[0], info_url: files_upload[1] };
  console.log(new_data);
  const newUser = new Tournaments(new_data);
  newUser
    .save()
    .then(user => {
      res.json({ message: 'User created successfully', user });
    })
    .catch(err => {
      console.log('Error occured while trying to save to DB');
    });
};

If you look at the docs for upload, it does not return a promise when you pass a callback, so you should not call await on it. The built-in Array.prototype.map is not compatible with async code in this form. You need to either use async.map or wrap the async code in a promise, like:
return await new Promise((resolve, reject) => {
  ...
  if (data) {
    fs.unlinkSync(item.path);
    resolve(data.Location);
  }
});
Your other code has some issues as well. A map callback should return a value; if you don't want to return anything, use forEach instead.

This is a bad place to ask for code review, but something like the following should work:
function uploadFile(s3, element) {
  return new Promise((resolve, reject) => {
    let folder;
    if (element.fieldname.includes('img')) {
      folder = 'club_images';
    } else if (element.fieldname.includes('poster')) {
      folder = 'poster_tournament';
    } else {
      folder = 'info_tournament';
    }
    const params = {
      ACL: 'public-read',
      Bucket: process.env.AWS_BUCKET_NAME,
      Body: fs.createReadStream(element.path),
      Key: `${folder + '/' + uuidv4() + element.fieldname}`
    };
    s3.upload(params, (err, data) => {
      if (err) {
        return reject(err);
      }
      if (data) {
        return fs.unlink(element.path, err => {
          if (err) {
            console.error("Failed to unlink file", element.path);
          }
          return resolve({ [element.fieldname]: data.Location });
        }); // Empty temp folder
      }
      return resolve();
    });
  });
}
tournamentsCtrl.createTournament = async (req, res) => {
  aws.config.setPromisesDependency();
  aws.config.update({
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    //region: process.env.REGION
  });
  const s3 = new aws.S3();
  let returnData;
  try {
    const uploadData = await Promise.all(req.files.map(element => uploadFile(s3, element)));
    returnData = Object.assign({}, ...uploadData);
    console.log(Object.assign(JSON.parse(req.body.values), returnData));
  } catch (e) {
    console.error('Failed to upload file', e);
    return res.sendStatus(500);
  }
  const newUser = new Tournaments(Object.assign(JSON.parse(req.body.values), returnData));
  console.log(newUser);
  try {
    const user = await newUser.save();
    res.json({ message: 'User created successfully', user });
  } catch (e) {
    console.error('Error occured while trying to save to DB');
    return res.sendStatus(500);
  }
};
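Note that Promise.all here is fail-fast: if any single upload rejects, the whole call rejects and the handler returns 500, but files that already reached S3 are not rolled back. If partial success matters to you, Promise.allSettled is worth a look.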

Related

Fetch multiple files and write to AWS S3 with a Node.js Lambda function

I have an array of image urls that I get from an SQS message. I need to download the images and store them in an S3 bucket. If downloading or storing an image fails, I need to catch the error, so I can push the image to another SQS queue for retrying later.
What I have so far does download and store the images, but I don't know how to access the results of the fetch and putObject functions. Also I'm not sure if I'm going about this the right way or if there's a more efficient/better/elegant way to do this.
This is what I have now:
const AWS = require("aws-sdk");
const fetch = require("node-fetch");
const s3 = new AWS.S3();

exports.handler = function(event, context) {
  // SQS may invoke with multiple messages
  for (const message of event.Records) {
    const bodyData = JSON.parse(message.body);
    const bucket = 'my_images_bucket';
    const images = bodyData.images;
    let urls = [];
    for (const image of images) {
      urls.push(image);
    }
    let promises = urls.map(image => {
      fetch(image)
        .then((response) => {
          if (!response.ok) {
            throw new Error('An error occurred while fetching ' + image + ': ' + response.statusText);
          }
          return response;
        })
        .then(async res => {
          try {
            const buffer = await res.buffer();
            console.log(image);
            // store
            return s3.putObject({
              Bucket: bucket,
              Key: image,
              Body: buffer,
              ContentType: "image/jpeg"
            }).promise();
          } catch (e) {
            console.log('An error occurred while storing image ' + image + ': ' + e);
          }
        })
        .catch((error) => {
          console.error(error);
        });
    });
    Promise.all(promises)
      .then(d => {
        console.log('All images downloaded.');
        console.log('PromiseAll result: ' + d);
      }).catch(e => {
        console.log('Whoops something went wrong!', e);
      });
  }
}
The output I get from this:
INFO All images downloaded.
INFO PromiseAll result: ,,,,
INFO https://myserver/10658272812/image14.jpg
INFO https://myserver/10658272810/image12.jpg
INFO https://myserver/10658272804/image6.jpg
INFO https://myserver/10658272813/image15.jpg
INFO https://myserver/10658272816/image18.jpg
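The ,,,, in the PromiseAll result is the giveaway: the map callback above never returns its fetch chain, so promises is an array of undefined values and Promise.all resolves before anything finishes. A minimal sketch of the fix, assuming node-fetch v2 (which provides response.buffer()):
let promises = urls.map(image => {
  // return the whole chain so Promise.all can actually wait on it
  return fetch(image)
    .then(response => {
      if (!response.ok) {
        throw new Error('Error fetching ' + image + ': ' + response.statusText);
      }
      return response.buffer();
    })
    .then(buffer => s3.putObject({
      Bucket: bucket,
      Key: image,
      Body: buffer,
      ContentType: "image/jpeg"
    }).promise());
});
With the chain returned, the resolved values of Promise.all are the putObject results.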
Here is code I wrote for a similar problem:
const s3Put = (filename, data, mime, s3Params = {}) => {
  return new Promise((resolve, reject) => {
    s3.putObject({
      Bucket: bucket,
      Key: filename,
      Body: data,
      ContentType: mime,
      ...s3Params
    }, (err, data) => {
      if (err) {
        return reject(err);
      }
      return resolve(data);
    });
  });
};
let filePromise = s3Put(to, content, fileMime, s3Params)
  .then(() => console.log("MyCode"))
  .catch(err => {
    const error = {
      message: `S3: ${(err.pri && err.pri.message) || (err.internal && err.internal.message)}`,
      to
    };
    errors.push(error);
    return onError(error);
  });
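Since you want failed images pushed to a retry queue rather than one failure aborting the batch, Promise.allSettled may be a better fit than Promise.all, assuming the per-image .catch handlers are removed so failures actually reject. A sketch (sqs and retryQueueUrl are hypothetical names, not from the original code):
// inside an async handler
const results = await Promise.allSettled(promises);
const failed = results
  .map((result, i) => ({ result, url: urls[i] }))
  .filter(({ result }) => result.status === 'rejected');

// push each failed URL to the retry queue
for (const { url } of failed) {
  await sqs.sendMessage({
    QueueUrl: retryQueueUrl,
    MessageBody: JSON.stringify({ image: url })
  }).promise();
}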

JIMP Issues with saving within /tmp in AWS Lambda function

I have written a Lambda function which opens an image, modifies it, saves it to the /tmp folder, and uploads it to an S3 bucket. This works locally, but when I run it in Lambda I get an error stating no such file or directory, open '/tmp/nft.png'. What could be causing this? I originally thought that it was an issue with the write function not being awaited, but I don't think this can be the case since it works fine locally.
var Jimp = require("jimp");
var fs = require('fs');
var path = require("path");
var AWS = require('aws-sdk');

AWS.config.update({
  accessKeyId: <removed>,
  secretAccessKey: <removed>
});
var s3 = new AWS.S3();

async function updateImage() {
  var img = await Jimp.read("base_img.png");
  var font = await Jimp.loadFont("fonts/Audiowide.fnt");
  img.print(<removed for simplicity>);
  return img.write("/tmp/nft.png");
}

function uploadFile(id) {
  return new Promise((resolve, reject) => {
    fs.readFile("/tmp/nft.png", function (err, data) {
      if (err) { throw err; }
      params = {<removed>};
      s3.putObject(params, function(err, data) {
        if (err) {
          console.log(err);
          reject(err);
        } else {
          console.log("Successfully uploaded data");
          resolve();
        }
      });
    });
  });
}

exports.handler = async (event) => {
  await updateImage();
  await uploadFile(event.queryStringParameters.id);
  return {
    statusCode: 200,
    body: JSON.stringify("Generated image")
  };
};
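Your original suspicion is plausible: Jimp's img.write takes an optional callback and does not return a promise, so updateImage can resolve before /tmp/nft.png is flushed to disk, and locally the file may simply win the race. A sketch using writeAsync, the promise-based variant that recent Jimp versions provide:
async function updateImage() {
  var img = await Jimp.read("base_img.png");
  var font = await Jimp.loadFont("fonts/Audiowide.fnt");
  img.print(<removed for simplicity>);
  // writeAsync returns a promise, so `await updateImage()`
  // really waits until the file exists in /tmp
  return img.writeAsync("/tmp/nft.png");
}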

How to delete an image on AWS S3 with Node and Express

I am working on a Node.js and Express.js application. I want to delete an image on AWS S3 that I uploaded with multer-s3.
I have tried many examples I found online, but none of them worked.
I tried this example, but it didn't work:
router.delete("/:id/delete", async (req, res) => {
  const params = {
    Bucket: bucketname,
    Key:
      "https://nodeilders.s3.us-east-2.amazonaws.com/public/uploads/programImages/church4%20%281%29.jpeg",
  };
  s3.deleteObject(params, (error, data) => {
    if (error) {
      res.status(500).send(error);
    } else {
      console.log("File has been deleted successfully");
    }
  });
});
I also tried this example, but it didn't work either:
router.delete("/:id/delete", async (req, res) => {
  const s3delete = function (params) {
    return new Promise((resolve, reject) => {
      s3.createBucket({
        Bucket: BUCKET_NAME /* Put your bucket name */
      }, function () {
        s3.deleteObject(params, function (err, data) {
          if (err) console.log(err);
          else
            console.log("Successfully deleted file from bucket");
          console.log(data);
        });
      });
    });
  };
});
The first example logged that the file was successfully deleted, but it was not deleted. What can I try to resolve this?
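One likely culprit in the first example: Key must be the object's key within the bucket, not the full object URL, and deleteObject reports success even when the key does not exist, which is exactly the misleading log you saw. A sketch, with the bucket and key split out of the URL from the question (the decoded key is an assumption):
router.delete("/:id/delete", async (req, res) => {
  const params = {
    Bucket: "nodeilders", // bucket name only, no domain
    Key: "public/uploads/programImages/church4 (1).jpeg", // key URL-decoded, relative to bucket root
  };
  s3.deleteObject(params, (error, data) => {
    if (error) {
      return res.status(500).send(error);
    }
    res.json({ message: "File has been deleted successfully" });
  });
});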

Upload files to AWS S3 from Node.js

I am using the AWS SDK to upload user-input images, get each image link back from AWS, and store the links in MongoDB. The problem is that .upload() is async.
const imgSRC = [];
for (let img of image) {
  console.log(img);
  const params = {
    Bucket: process.env.AWS_BUCKET,
    Key: `${img.originalname}_${userID}`,
    Body: img.buffer,
  };
  s3.upload(params, (error, data) => {
    if (error) {
      console.log(error);
      res.status(500).json({ msg: "server error" });
    }
    imgSRC.push(data.Location);
    console.log(imgSRC);
  });
}
const newPost = new Post({
  userID: userID,
  contentID: contentID,
  posts: [
    {
      caption: caption,
      data: imgSRC,
    },
  ],
});
const post = await newPost.save();
In that case, when the .save() to MongoDB runs, there are no image links from AWS yet. How can I fix this?
I've already tried async/await and it didn't work.
You need to use Promise.all() in this manner:
const uploadImage = (obj) => {
  return new Promise((resolve, reject) => {
    const params = {
      Bucket: process.env.AWS_BUCKET,
      Key: obj.key,
      Body: obj.body,
    };
    s3.upload(params, (error, data) => {
      if (error) {
        console.log(error);
        return reject(error);
      }
      return resolve(data);
    });
  });
};
const mainFunction = async () => {
  const promises = [];
  for (let img of image) {
    const options = {
      key: `${img.originalname}_${userID}`,
      body: img.buffer
    };
    promises.push(uploadImage(options));
  }
  const result = await Promise.all(promises);
  const imgSRC = result.map((r) => r.Location);
  return imgSRC;
};
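Then create the post only after mainFunction resolves, using the same fields as in your question:
const imgSRC = await mainFunction();
const newPost = new Post({
  userID: userID,
  contentID: contentID,
  posts: [
    {
      caption: caption,
      data: imgSRC,
    },
  ],
});
const post = await newPost.save();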
If you want to use await on the s3.upload method, remove the callback and chain .promise(), since upload with a callback does not return a promise.
try {
  const data = await s3.upload(params).promise();
  imgSRC.push(data.Location);
  console.log(imgSRC);
} catch (e) {
  console.log(e);
  res.status(500).json({ msg: "server error" });
}
Let me know if it works.

Query S3 JSON file in AWS

I have a JSON file uploaded to S3, and I wrote the following code to query this file:
const aws = require('aws-sdk');
const s3 = new aws.S3();
const bucket = 'hotels.mserver.online';
const objectKey = 'hotelsrates.json';

exports.handler = (event, context, callback) => {
  // TODO implement
  const response = getS3Objects(bucket, objectKey); //s3.listObjectsV2({}).promise();
  console.log(response);
};

function getS3Objects(bucket, key) {
  return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
    .promise().then(file => { return file; })
    .catch(error => { return error; });
}
But the result is null.
I understand what you are trying to accomplish here but that is not the right way to do it.
function getS3Objects(bucket, key) {
  return s3.getObject({ Bucket: bucket, Key: key, ResponseContentType: 'application/json' })
    .promise().then(file => { return file; })
    .catch(error => { return error; });
}
The part above will still return a promise object, which means that you need to handle it accordingly. Instead of const response = getS3Objects(bucket,objectKey); you want to do
getS3Objects(bucket,objectKey).then(response => console.log(response));
Inside of your handler function.
Furthermore, your usage of the s3.getObject function is incorrect: the first argument is a parameters object and the second is a callback function.
s3.getObject(params, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data);
});
Therefore in your case, you want to modify your getS3Objects function a bit. If you want to use promises, then you can do it like this.
function getS3Objects(bucket, key) {
  return new Promise((resolve, reject) => {
    s3.getObject(
      {
        Bucket: bucket,
        Key: key,
        ResponseContentType: 'application/json'
      },
      (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      }
    );
  });
}
Another way that you can do this is as follows:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

async function readFile(Bucket, Key) {
  const params = {
    Bucket,
    Key,
    ResponseContentType: 'application/json',
  };
  const f = await s3.getObject(params).promise();
  return f.Body.toString('utf-8');
}

readFile('mybucket', 'xyz.json').then(console.log);
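Since the goal is to query the file, you will usually want the parsed object rather than the raw string; for example, with the bucket and key from the question:
readFile('hotels.mserver.online', 'hotelsrates.json')
  .then(body => JSON.parse(body))
  .then(rates => console.log(rates));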
