AWS-SDK does not return multiple images - node.js

I have a Node.js project that uses multer and aws-sdk to upload images to AWS S3, and I am facing issues with getting the images back.
s3.js
const fs = require('fs');
const S3 = require('aws-sdk/clients/s3');

// bucket name and credentials assumed to come from environment variables
const bucketName = process.env.AWS_BUCKET_NAME;
const s3 = new S3({
  region: process.env.AWS_BUCKET_REGION,
  accessKeyId: process.env.AWS_ACCESS_KEY,
  secretAccessKey: process.env.AWS_SECRET_KEY
});

// function that uploads a file to S3
function uploadFile(file) {
  const fileStream = fs.createReadStream(file.path);
  const uploadParams = {
    Bucket: bucketName,
    Body: fileStream,
    Key: file.originalname
  };
  return s3.upload(uploadParams).promise();
}

// function that downloads a file from S3 as a readable stream
function getFileStream(fileKey) {
  const downloadedParams = {
    Bucket: bucketName,
    Key: fileKey
  };
  return s3.getObject(downloadedParams).createReadStream();
  // return s3.getObject(downloadedParams);
}

module.exports = { uploadFile, getFileStream };
Here is the controller.js
exports.getUserImages = async (req, res) => {
  try {
    const userID = req.params.userId;
    const images = await Gallery.find({ userID });
    if (!images) {
      res.status(400).json({ message: 'No images found' });
      return;
    }
    console.log(images);
    // find and show all those images
    for (const image of images) {
      const readStream = getFileStream(image.key);
      readStream.pipe(res);
    }
  } catch (e) {
    res.status(500).json({ message: e.message });
  }
};
This works when I use findOne and it displays the image, but when I try to get all the images it does not work; I just get a blank screen. I store the image names as the Key in my local database, and my AWS S3 bucket is public. I only want to display the images, not download them.
mongodb
key:"PIC.jpeg"

Related

Node.js async calls do not run in sequence

I am completely new to Node.js.
I am trying to code the following steps:
Download a file from an AWS S3 folder.
Then upload it to another AWS S3 folder.
I searched online and wrote similar code in Node.js; it is shown below.
What I see here is that the downloadFile and uploadFile functions run in parallel, and uploadFile seems to run first.
How do I run them in sequence?
const aws = require('aws-sdk');
var s3 = new aws.S3();
var fs = require('fs');

// TODO implement
var params = { Bucket: "buckets3", Key: "input_pdf_img/Gas_bill_sample.pdf" };
const filename = 'Gas_bill_sample.pdf';
const bucketName = "translation-bucket-qa-v1";
const key = "input_pdf_img/Gas_bill_sample.pdf";
const key2 = "output_pdf2docx_img/" + filename;
//console.log(filename);
const tmp_filename = "/tmp/Gas_bill_sample.pdf";
console.log(filename);

const downloadFile = (tmp_filename, bucketName, key) => {
  const params2 = {
    Bucket: bucketName,
    Key: key
  };
  s3.getObject(params, (err, data) => {
    if (err) console.error(err);
    fs.writeFileSync(tmp_filename, data.Body.toString());
    //console.log(`${filePath} has been created!`);
  });
};

//downloadFile(tmp_filename, bucketName, key);
//console.log('download done');
//await sleep(1000);

//upload
const uploadFile = (tmp_filename) => {
  // Read content from the file
  const fileContent = fs.readFileSync(tmp_filename);
  // Setting up S3 upload parameters
  const params2 = {
    Bucket: bucketName,
    Key: key2, // File name you want to save as in S3
    Body: fileContent
  };
  // Uploading files to the bucket
  s3.upload(params2, function(err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};

downloadFile(tmp_filename, bucketName, key);
console.log('download done');
//setTimeout(() => {console.log("Let the download finish")}, 6000);
uploadFile(tmp_filename);
//setTimeout(() => {console.log("Let the download finish")}, 6000);
I tried timeouts and other workarounds, but nothing helped.
Since the two calls run in parallel, the error is "No such file or directory", because uploadFile runs before downloadFile has finished writing the file.
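The callback-based getObject and upload calls return immediately, so the top-level statements never wait for them. One way to run them in sequence is to use the .promise() form of the SDK calls with async/await. Below is a minimal sketch of that approach, reusing the bucket and key names from the question; copyViaTmp is just an illustrative wrapper name.

const aws = require('aws-sdk');
const fs = require('fs');
const s3 = new aws.S3();

// Same constants as in the question (assumed)
const bucketName = "translation-bucket-qa-v1";
const key = "input_pdf_img/Gas_bill_sample.pdf";
const key2 = "output_pdf2docx_img/Gas_bill_sample.pdf";
const tmp_filename = "/tmp/Gas_bill_sample.pdf";

// Download the object, write it to /tmp, then upload it under the new key.
// Using .promise() lets await enforce the ordering.
const copyViaTmp = async () => {
  const data = await s3.getObject({ Bucket: bucketName, Key: key }).promise();
  fs.writeFileSync(tmp_filename, data.Body); // keep the Buffer as-is (no toString) for binary files
  console.log('download done');

  const result = await s3
    .upload({ Bucket: bucketName, Key: key2, Body: fs.createReadStream(tmp_filename) })
    .promise();
  console.log(`File uploaded successfully. ${result.Location}`);
};

copyViaTmp().catch(console.error);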

Nodejs - how to upload file from req to aws s3

Working on a web app: React & Node.
The client app sends a file to the server, and the server should upload it to an S3 bucket.
Using import S3 from 'aws-sdk/clients/s3'; I found the upload function.
The upload function expects to get a Buffer or Stream, and I don't know how to convert the file to a buffer/stream.
Code
app.get('/upload', (req, res) => {
  const { file } = req;
  s3.upload({ Bucket: 'MY_BUCKET', Key: 'MY_KEY', Body: streamifyFile(file) });
});

const streamifyFile = () => {
  // how to implement
};
Solution:
const stream = require('stream');

const uploadToS3 = async (req) => {
  const { file } = req.body;
  const { filename, createReadStream } = await file;
  const pass = new stream.PassThrough();
  const params = {
    Bucket: process.env.S3_BUCKET,
    Key: `${context.user.email}/${new Date().toISOString()}.${filename}`,
    Body: pass,
  };
  const uploadPromise = s3.upload(params).promise();
  const streamInput = createReadStream();
  streamInput.pipe(pass);
  const uploadData = await uploadPromise;
  logger.info(
    `Successfully uploaded - file: ${filename}, location: ${uploadData.Location}`,
  );
};
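If the file reaches the server as a multipart form field, another route is to let a middleware such as multer parse it: with multer's memoryStorage the file arrives as a Buffer on req.file.buffer, which s3.upload accepts directly as Body, so no manual conversion is needed. A minimal sketch under that assumption (the bucket name and route are placeholders taken from the question):

const express = require('express');
const multer = require('multer');
const S3 = require('aws-sdk/clients/s3');

const app = express();
const s3 = new S3();
// memoryStorage keeps the uploaded file in memory as req.file.buffer
const upload = multer({ storage: multer.memoryStorage() });

app.post('/upload', upload.single('file'), async (req, res) => {
  try {
    const result = await s3
      .upload({
        Bucket: 'MY_BUCKET',            // placeholder bucket name from the question
        Key: req.file.originalname,     // or any key naming scheme you prefer
        Body: req.file.buffer,          // a Buffer is accepted directly by s3.upload
        ContentType: req.file.mimetype,
      })
      .promise();
    res.json({ location: result.Location });
  } catch (err) {
    res.status(500).json({ message: err.message });
  }
});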

How to upload multiple files in nodejs to AWS S3 and save file url into database?

Hi, I need to upload multiple images at a time to S3.
Currently I am using express-fileupload to upload a single image to AWS, and I want to use the same approach to upload multiple files to S3 and update the images array with the URLs in MongoDB.
My schema property:
const ServiceSchema = new mongoose.Schema({
  photo: [
    {
      type: String,
      default: 'no-photo.jpg',
    },
  ],
});

module.exports = mongoose.model('Service', ServiceSchema);
My Controller:
// #desc   Upload photo for service
// #route  PUT /api/v1/services/:id/photo
// #access Private
exports.servicePhotoUpload = asyncHandler(async (req, res, next) => {
  const service = await Service.findById(req.params.id);
  if (!service) {
    return next(new ErrorResponse(`Service not found with id of ${req.params.id}`, 404));
  }

  // Make sure user adding service is business owner
  if (service.user.toString() !== req.user.id && req.user.role !== 'admin') {
    return next(
      new ErrorResponse(
        `User ${req.user.id} is not authorized to update this service to business ${service._id}`,
        401
      )
    );
  }

  // File upload validation
  if (!req.files) {
    return next(new ErrorResponse(`Please upload a file.`, 400));
  }

  const file = req.files.file;

  // Make sure it is a valid image file
  if (!file.mimetype.startsWith('image')) {
    return next(new ErrorResponse(`Please upload a valid image file.`, 400));
  }

  // Check file size
  if (file.size > process.env.MAX_FILE_UPLOAD) {
    return next(
      new ErrorResponse(
        `Please upload an image less than ${process.env.MAX_FILE_UPLOAD / 1024}KB in size.`,
        400
      )
    );
  }

  // Create custom filename
  file.name = `service-uploads/servicePhoto_${service._id}${path.parse(file.name).ext}`;

  uploadToS3({
    fileData: req.files.file.data,
    fileName: file.name,
  })
    .then(async (result) => {
      console.log('Success Result: ', result);
      await Service.findByIdAndUpdate(service._id, { photo: result.Location });
      return res
        .status(200)
        .json({ success: true, message: 'Service photo added successfully', url: result.Location });
    })
    .catch((err) => {
      console.log(err);
      return next(new ErrorResponse('Failed to upload file to S3', 500));
    });
});
My Utility File to upload File to S3:
const AWS = require('aws-sdk');

const uploadToS3 = (options) => {
  // Set the AWS configuration
  AWS.config.update({
    accessKeyId: process.env.AWS_S3_ACCESS_KEY,
    secretAccessKey: process.env.AWS_S3_SECRET_KEY,
    region: 'us-east-2',
  });

  // Create S3 service object
  const s3 = new AWS.S3({ apiVersion: '2006-03-01' });

  // Setting up S3 upload parameters
  const params = {
    Bucket: 'toolbox-uploads',
    Key: options.fileName, // File name you want to save as in S3
    Body: options.fileData,
  };

  // Return the S3 upload as a promise so the returned URL can be handled properly
  return s3.upload(params).promise();
};

module.exports = uploadToS3;
My Router:
const express = require('express');
const { servicePhotoUpload } = require('../controllers/service');
const Service = require('../models/Service');

const router = express.Router();

router.route('/:id/photo').put(protect, authorize('publisher', 'business', 'admin'), servicePhotoUpload);

module.exports = router;
The above code is working 100%.
I am a bit confused because there were different approaches, and none of the ones I found on Google and Stack Overflow worked for me; none of them returned the URL and saved it into the database.
I want to make a separate utility file to upload multiple files to S3, just as I did for single files, so I can use it anywhere. That file should return the uploaded URLs so I can update my database.
I have tried multer-s3 but no solution works for me.
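One way to generalize the working single-file utility: since s3.upload(...).promise() already returns a promise, a multi-file version can map each file to its own upload promise and wait for all of them with Promise.all, which resolves with the results (and their Location URLs) in input order. Below is a minimal sketch of that approach, reusing the bucket and credential settings from the utility above; uploadMultipleToS3 and the { fileName, fileData } shape are illustrative names, not part of the original code.

const AWS = require('aws-sdk');

// Upload several files and resolve with all of their S3 URLs.
// `files` is assumed to be an array of { fileName, fileData } objects,
// mirroring the options shape of the single-file utility above.
const uploadMultipleToS3 = (files) => {
  AWS.config.update({
    accessKeyId: process.env.AWS_S3_ACCESS_KEY,
    secretAccessKey: process.env.AWS_S3_SECRET_KEY,
    region: 'us-east-2',
  });
  const s3 = new AWS.S3({ apiVersion: '2006-03-01' });

  const uploads = files.map((file) =>
    s3
      .upload({
        Bucket: 'toolbox-uploads',
        Key: file.fileName,
        Body: file.fileData,
      })
      .promise()
  );

  // Promise.all keeps the order of the input array,
  // so the returned URLs line up with the files passed in.
  return Promise.all(uploads).then((results) => results.map((r) => r.Location));
};

module.exports = uploadMultipleToS3;

In a controller, the returned array could then be written straight into the photo array field, for example await Service.findByIdAndUpdate(service._id, { photo: urls }).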
This approach might be different for you, but this is how I was able to resolve the same issue.
First you'll need:
Multer
multer-s3
aws-sdk
I made a FileUpload class that handles both single and multi-upload (I also needed to be able to upload PDF and video files). This is the code in my constructor; note that I also specified the S3 bucket in question from AWS.
this.s3 = new AWS.S3({
  accessKeyId: process.env.S3_ACCESS_KEY_ID,
  secretAccessKey: process.env.S3_SECRET_KEY,
  Bucket: 'name_of_s3_bucket',
});
I created a method called upload in the class. Code below
upload(path, type) {
  let ext = 'jpeg';

  const multerFilter = (req, file, cb) => {
    if (type === 'image') {
      if (file.mimetype.startsWith(this.type)) {
        cb(null, true);
      } else {
        cb(
          new AppError('Not an Image! Please upload only images', 400),
          false
        );
      }
    } else if (type === 'pdf') {
      ext = 'pdf';
      const isPdf = file.mimetype.split('/')[1];
      if (isPdf.startsWith(this.type)) {
        cb(null, true);
      } else {
        cb(
          new AppError('Not a pdf! Please upload only pdf', 400),
          false
        );
      }
    }
  };

  const upload = multer({
    storage: multers3({
      acl: 'public-read',
      s3: this.s3,
      bucket: 'name_of_s3_bucket',
      metadata: function (req, file, cb) {
        cb(null, { fieldName: file.fieldname });
      },
      key: function (req, file, cb) {
        let filename = `user-${req.user.id}/${path}/${uuid.v4()}-${Date.now()}.${ext}`;
        // eslint-disable-next-line camelcase
        const paths_with_sub_folders = ['auditions', 'biography', 'movies'];
        if (paths_with_sub_folders.includes(path)) {
          filename = `user-${req.user.id}/${path}/${req.params.id}/${uuid.v4()}-${Date.now()}.${ext}`;
        }
        cb(null, filename);
      },
    }),
    fileFilter: multerFilter,
    limits: {
      fileSize: 5000000,
    },
  });

  return upload;
}
To consume the above, I import the class into any controller that needs an upload feature and call the following.
Side note: ignore the path code (it was just a way to generate unique file names for the files).
const upload = new FileUpload('image').upload('profile-images', 'image');
exports.uploadUserPhoto = upload.array('photos', 10);
I then used uploadUserPhoto as a middleware before calling the following.
exports.addToDB = catchAsync(async (req, res, next) => {
  if (!req.files) return next();
  req.body.photos = [];
  await Promise.all(
    req.files.map(async (file, i) => {
      req.body.photos.push(file.key);
    })
  );
  next();
});
As a high-level overview, this is the flow: first, upload your photos to S3 and get req.files, then loop through that req.files array, pushing each key into an array field on your req object, and finally save them in your DB.
NOTE: You must promisify the req.files loop since the task is asynchronous.
My final router looked like this
router
  .route('/:id')
  .put(uploadUserPhoto, addToDB, updateProfile);
Item.js
Your model can have a field called images that is of type array.
const mongoose = require("mongoose");

const ItemSchema = mongoose.Schema({
  images: {
    type: [],
  },
});

module.exports = mongoose.model("Items", ItemSchema);
You map through the array of objects and extract only the data you want to store; in this example it is the key, which is the unique name given to every image that is uploaded.
route.js
router.post("/", verify, upload.array("image"), async (req, res) => {
const { files } = req;
const images = [];
files.map((file) => {
images.push(file.key);
});
try {
new Item({
images,
}).save();
res.status(200).send({message: "saved images to db"})
}catch(err){
res.status(400).send({message: err})
}
});
Let me know if this does what you wanted
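One additional note on getting URLs rather than keys into the database: after a successful upload, multer-s3 also sets file.location on each file, which is the full S3 object URL. If the schema should store URLs, the middleware can push that property instead. A small sketch of that variant, following the field names used in the answer above:

// Variant of the addToDB middleware that stores full S3 URLs instead of keys.
// multer-s3 sets file.location to the object's URL after a successful upload.
exports.addToDB = catchAsync(async (req, res, next) => {
  if (!req.files) return next();
  req.body.photos = req.files.map((file) => file.location);
  next();
});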

Extract zip file in S3 Bucket using AWS lambda functions in nodeJs "Error: Invalid CEN header (bad signature)"

I am struggling with unzipping contents in AWS S3. AWS S3 does not provide the functionality to unzip a zip folder in the S3 bucket directly. I am facing one error; my upload code is shown below.
"Error: Invalid CEN header (bad signature)"
Any advice or guidance would be greatly appreciated.
My Node.js code to upload the zip file:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({ signatureVersion: 'v4' });

exports.handler = async (event, context) => {
  const bucket = 'bucket-name';
  console.log(event);
  const body = event.body;
  const key = JSON.parse(body).key;
  console.log(key);
  const params = {
    Bucket: bucket,
    Key: key,
    ContentType: 'application/zip',
    Expires: 60
  };
  try {
    const signedURL = await s3.getSignedUrl('putObject', params);
    const response = {
      err: {},
      body: "url send",
      url: signedURL
    };
    return response;
  } catch (e) {
    const response = {
      err: e.message,
      body: "error occured"
    };
    return response;
  }
};
My Node.js code to extract the zip file:
const S3Unzip = require('s3-unzip');

exports.s3_unzip = function(event, context, callback) {
  const filename = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
  const bucketname = event.Records[0].s3.bucket.name;
  console.log(event.Records[0].s3.object.key);
  new S3Unzip({
    bucket: bucketname,
    file: filename,
    deleteOnSuccess: true,
    verbose: true,
  }, function(err, success) {
    if (err) {
      callback(err);
    } else {
      callback(null);
    }
  });
};
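"Invalid CEN header (bad signature)" generally means the object sitting in S3 is not a readable zip archive (its central directory cannot be parsed), which usually points to the upload step corrupting the file, for example sending the body as text or base64, rather than to the extraction code. Because the URL above is signed with ContentType: 'application/zip', the client's PUT also has to send that same header along with the raw binary bytes. A minimal client-side sketch of such a PUT, assuming axios is available; uploadZip is an illustrative helper, not part of the original code:

const fs = require('fs');
const axios = require('axios');

// PUT the raw zip bytes to the presigned URL returned by the Lambda above.
async function uploadZip(signedURL, zipPath) {
  const zipBuffer = fs.readFileSync(zipPath); // raw bytes, no string conversion
  await axios.put(signedURL, zipBuffer, {
    headers: { 'Content-Type': 'application/zip' }, // must match the ContentType used when signing
    maxBodyLength: Infinity, // allow large zip bodies
  });
}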

application/octet-stream issue while using google moderate images trigger (blur image)

I'm using the moderate-images solution trigger from Google.
I took this solution from here.
I asked someone to upgrade this solution for me, and here is the code:
'use strict'
const gm = require('gm').subClass({ imageMagick: true })
const functions = require('firebase-functions')
const admin = require('firebase-admin')
admin.initializeApp()
const Vision = require('@google-cloud/vision')
const vision = new Vision.ImageAnnotatorClient()
const spawn = require('child-process-promise').spawn
const path = require('path')
const fs = require('fs')
const { Storage } = require('@google-cloud/storage')
const gcs = new Storage({
  projectId: xxxxxxxxxxx,
})

exports.blurOffensiveImages = functions.storage
  .object()
  .onFinalize(async (object) => {
    const file = gcs.bucket(object.bucket).file(object.name)
    const filePath = `gs://${object.bucket}/${object.name}`
    console.log(`Analyzing ${file.name}.`)
    try {
      const [result] = await vision.safeSearchDetection(filePath)
      const detections = result.safeSearchAnnotation || {}
      if (
        detections.adult === 'VERY_LIKELY' ||
        detections.violence === 'VERY_LIKELY'
      ) {
        console.log(`Detected ${file.name} as inappropriate.`)
        await blurImage(file, object.bucket, object.metadata)
        console.log('Deleted local file', file)
        return null
      } else {
        console.log(`Detected ${file.name} as OK.`)
      }
    } catch (err) {
      console.error(`Failed to analyze ${file.name}.`, err)
      throw err
    }
  })

async function blurImage(file, bucketName, metadata) {
  const tempLocalPath = `/tmp/${path.parse(file.name).base}`
  const bucket = gcs.bucket(bucketName)
  await file.download({ destination: tempLocalPath })
  console.log('The file has been downloaded to', tempLocalPath)
  // Blur the image using ImageMagick.
  await new Promise((resolve, reject) => {
    gm(tempLocalPath)
      .blur(0, 20)
      .write(tempLocalPath, (err, stdout) => {
        if (err) {
          console.error('Failed to blur image.', err);
          reject(err);
        } else {
          console.log(`Blurred image: ${file.name}`);
          resolve(stdout);
        }
      });
  });
  console.log('Blurred image created at', tempLocalPath)
  await bucket.upload(tempLocalPath, {
    destination: file.name,
    metadata: { metadata: metadata },
  })
  console.log('Blurred image uploaded to Storage at', file)
  return fs.unlink(tempLocalPath, (e) => { if (e) { console.log(e) } })
}
And it worked perfectly, with one bad issue.
Sometimes when a user sends a list of photos I get the "application/octet-stream" file type, but it should be "image/jpg"; all media files in my project should be image/jpg.
(Screenshot: one user's publication with the wrong image data type.)
It looks like this trigger gets stuck while it is executing.
I added a delay to the image uploads in my project, but it doesn't help.
I tested it: when I delete this trigger, all uploaded photos are fine and there are no issues at all.
Help me fix it.
P.S. I also want to note that after uploading, the image should keep all of its data like the original (destination, name, etc.).
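One detail worth checking in blurImage above: bucket.upload is called with only the custom metadata, so the re-uploaded blurred object can end up with the default application/octet-stream content type instead of the original image/jpeg. In @google-cloud/storage the content type can be set explicitly through metadata.contentType. Below is a minimal sketch of that adjustment, assuming the original type is taken from the finalize event (object.contentType) and passed in; uploadBlurred is an illustrative helper name, not part of the original code.

const { Storage } = require('@google-cloud/storage')
const gcs = new Storage()

// Sketch: re-upload the blurred file while forwarding the original content type,
// so the object keeps image/jpeg instead of defaulting to application/octet-stream.
// `contentType` is assumed to come from the finalize event (object.contentType).
async function uploadBlurred(tempLocalPath, bucketName, destination, metadata, contentType) {
  const bucket = gcs.bucket(bucketName)
  await bucket.upload(tempLocalPath, {
    destination,
    metadata: {
      contentType: contentType || 'image/jpeg', // original type from the event
      metadata: metadata,                       // keep the existing custom metadata
    },
  })
}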
