How to rename my originalname when using multer memoryStorage? - node.js

I am trying to rename the file's originalname when using multer memoryStorage. I am using multer to upload an array of files, and when I console.log(req.files) I get:
{
  fieldname: 'images',
  originalname: 'snake.jpg',
  encoding: '7bit',
  mimetype: 'image/jpeg',
  buffer: <Buffer ff d8 ... 38134 more bytes>,
  size: 38184
}
The reason I want to rename originalname is that I am storing the images in AWS S3, and if two images have the same name the existing object gets overwritten rather than a new image being added.
I have tried appending the date to originalname when storing it in the database, but then originalname does not change when the images are added to the bucket.
Here is my code:
posts.js
const storage = multer.memoryStorage();
const upload = multer({
  storage: storage,
  limits: { fieldSize: 25 * 1024 * 1024 },
});

router.post(
  "/",
  [upload.array("images", config.get("maxImageCount")), imageResize],
  async (req, res) => {
    const paths = req.files.map((file) => ({
      originalName: file.originalname + "-" + new Date().toISOString() + "-" + uuidv4(),
    }));
    await Post.create(
      {
        title: req.body.title,
        userId: req.body.userId,
        Post_Images: paths.map((x) => ({ images: x.originalName })),
      },
      { include: [Post_Image] }
    ).then(() => res.status(201).send());
  }
);
imageResize.js
const sharp = require("sharp");
require("dotenv").config();
const AWS = require("aws-sdk");

const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ID,
  secretAccessKey: process.env.AWS_SECRET,
  region: process.env.AWS_REGION,
});

module.exports = async (req, res, next) => {
  const images = [];
  const resizePromises = req.files.map(async (file) => {
    console.log(file);
    await sharp(file.buffer)
      .resize(2000)
      .jpeg({ quality: 50 })
      .toBuffer()
      .then((resized) =>
        s3.upload({
          Bucket: process.env.AWS_BUCKET,
          Key: file.originalname + "_full.jpg",
          Body: resized, // upload the resized buffer rather than the original
          ACL: "public-read",
        }).promise()
      );
    await sharp(file.buffer)
      .resize(100)
      .jpeg({ quality: 30 })
      .toBuffer()
      .then((resized) =>
        s3.upload({
          Bucket: process.env.AWS_BUCKET,
          Key: file.originalname + "_thumb.jpg",
          Body: resized,
          ACL: "public-read",
        }).promise()
      );
    images.push(file.originalname);
  });
  await Promise.all(resizePromises);
  req.images = images;
  next();
};
In other words, I am trying to change originalname in my req.files to
originalname + "-" + new Date().toISOString() + "-" + uuidv4()
OR
How do I keep the uuid and the date the same in both posts.js and imageResize.js?
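One approach that should cover both variants of the question is to rename each file once, in a small middleware that runs after upload.array but before imageResize, so that posts.js and imageResize.js both see the same generated name. A minimal sketch (rename.js is a hypothetical file name; it assumes the uuid package already used in posts.js):

// rename.js - mutate originalname once so every later handler sees the new value
const { v4: uuidv4 } = require("uuid");

module.exports = (req, res, next) => {
  if (req.files) {
    req.files.forEach((file) => {
      file.originalname = `${file.originalname}-${new Date().toISOString()}-${uuidv4()}`;
    });
  }
  next();
};

With the route middleware ordered as [upload.array("images", ...), rename, imageResize], the Key built in imageResize.js and the value stored in Post_Images would share the same date and uuid, because the name is generated only once.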

Related

NodeJS Google Cloud Storage getFiles from a large bucket

I am using the Node.js Google Cloud Storage module to get files from a bucket, but the bucket contains loads of files and folders, so it is taking a very long time to return. Any suggestions on how to make it faster or more streamlined?
This is my code:
import { Storage } from '@google-cloud/storage';

export const getFiles = async () => {
  const BUCKET_NAME = 'archive';
  const bucket = new Storage({ credentials: creds }).bucket(BUCKET_NAME);
  const [bucketFiles] = await bucket.getFiles({ autoPaginate: false });
  console.log(bucketFiles);
  return bucket
    .getFiles()
    .then(([files]) => {
      const filesResponse = files.map(({ metadata: file }) => ({
        cacheControl: file.cacheControl || "",
        contentEncoding: file.contentEncoding || "",
        contentType: file.contentType || "",
        version: file.generation,
        id: file.id,
        downloadLink: file.mediaLink,
        path: file.name,
        size: file.size,
        updated: file.updated,
        originalFileName: file.originalFileName,
      }));
      console.log({ bucket: bucket.name, files: filesResponse });
      return { bucket: bucket.name, files: filesResponse };
    })
    .catch();
};
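One way to speed this up is to avoid auto-paginating the whole bucket in a single call: pass autoPaginate: false together with maxResults and a pageToken, and narrow the listing with a prefix where possible. A rough sketch (the prefix and page size are placeholders, and creds is the same credentials object as above):

import { Storage } from '@google-cloud/storage';

// Sketch: lists one page of objects at a time instead of everything up front.
export const getFilesPage = async (pageToken) => {
  const bucket = new Storage({ credentials: creds }).bucket('archive');

  // With autoPaginate: false, getFiles resolves to [files, nextQuery, apiResponse];
  // nextQuery (when present) holds the pageToken for the following page.
  const [files, nextQuery] = await bucket.getFiles({
    autoPaginate: false,
    maxResults: 500,         // page size, tune as needed
    prefix: 'some/folder/',  // placeholder: restrict the listing if you can
    pageToken,               // undefined on the first call
  });

  return {
    files: files.map((f) => ({ path: f.name, size: f.metadata.size })),
    nextPageToken: nextQuery ? nextQuery.pageToken : null,
  };
};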

Upload image to s3 bucket - react native and node js

Within my app a user can select a profile image, and I would like that image to be uploaded to an S3 bucket when the user saves their profile data.
I pass the image data (and JSON, which consists of name, email, telephone for example) from my app to an Express server and upload there.
At present I can pass the image data (the URL, it seems) to an S3 bucket and it saves.
I don't think I'm actually saving the image itself though, as when downloading from S3 (manually) and trying to open it on my Mac, it states it may be damaged and I cannot see the image.
Feel daft for asking, but how do I actually upload the image itself? Thanks
React Native Side
const handleFormSubmit = formData => {
  const jsonData = JSON.stringify({
    ...formData,
  });
  // Handle profile image
  if (imageProps && imageProps.uri) {
    const data = new FormData();
    data.append('formBody', jsonData);
    data.append('image', {
      uri:
        Platform.OS === 'android'
          ? imageProps.uri
          : imageProps.uri.replace('file://', ''),
      type: imageProps.type,
      name: imageProps.fileName,
    });
    sendRequest(data);
  } else {
    sendRequest(jsonData);
  }
};

const sendRequest = data => {
  let responseData;
  fetch('http://localhost:8080/users/api/update_user_profile', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Accept: 'application/json',
    },
    body: data,
  })
    .then(response => {
      responseData = response;
      return response.json();
    })
    .then(jsonData => {
      console.log(jsonData);
    })
    .catch(error => {
      console.log(error);
    });
};
Server Side
const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});

// Setting up S3 upload parameters
const params = {
  Bucket: 'bucket-folder',
  ACL: 'public-read',
  Key: req.files.image.name,
  Body: req.files.image.path
};

const stored = await s3.upload(params).promise();
You can use Multer for uploading files to s3.
const multer = require('multer');
const AWS = require('aws-sdk');
const uniqid = require('uniqid');

const storage = multer.memoryStorage();
const upload = multer({ storage });

// ? Posts new file to amazon and saves to db
router.post(
  '/:id',
  upload.single('attachment'),
  async (req, res) => {
    const unique = uniqid.time();
    const { file } = req;
    const { filePath } = req.body;
    const { id } = req.params;
    const s3FileURL = process.env.AWS_UPLOADED_FILE_URL;
    const region = process.env.AWS_REGION;
    const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
    const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
    const Bucket = process.env.AWS_BUCKET_NAME + '/' + filePath;
    const Key = `${id}/${unique}-${file.originalname}`;
    const Body = file.buffer;
    const ContentType = file.mimetype;
    const ACL = 'public-read';

    const s3bucket = new AWS.S3({
      accessKeyId,
      secretAccessKey,
      region,
    });

    const params = {
      Bucket,
      Key,
      Body,
      ContentType,
      ACL,
    };

    s3bucket.upload(params, async (err, data) => {
      if (err) {
        res.status(500).json({ error: true, Message: err });
      } else {
        console.log(params);
        const newFileUploaded = {
          description: req.body.description,
          fileLink: `${s3FileURL}${filePath}/${id}/${unique}-${file.originalname}`,
          s3_key: params.Key,
        };
        try {
          const response = await postFile({
            name: req.body.name,
            attachment: newFileUploaded,
            alt: req.body.alt,
            user: req.body.user,
            relatedID: req.body.relatedID,
          });
          res.status(200).json({
            message: response.message,
            success: response.success,
            result: response.result,
          });
        } catch (e) {
          res.status(500).json({
            message: 'File uploaded but DB could not save the request (upload by ID)',
            success: false,
            result: [],
          });
        }
      }
    });
  }
);
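Tying the example back to the original problem: the damaged downloads are most likely because Body was set to req.files.image.path (a string), so S3 stored the path text rather than the image bytes, and the React Native fetch also forces 'Content-Type': 'application/json' even when a FormData body is sent. A minimal sketch of the server side with those two points in mind (the field name 'image' matches the client code, and upload is the multer memoryStorage instance from the answer above):

const upload = multer({ storage: multer.memoryStorage() });

router.post('/users/api/update_user_profile', upload.single('image'), async (req, res) => {
  // with memoryStorage, req.file.buffer holds the actual image bytes
  const stored = await s3.upload({
    Bucket: 'bucket-folder',
    ACL: 'public-read',
    Key: req.file.originalname,
    Body: req.file.buffer,          // the image itself, not a path string
    ContentType: req.file.mimetype,
  }).promise();

  res.json({ location: stored.Location });
});

On the client, leaving out the explicit 'Content-Type': 'application/json' header when the body is FormData lets fetch set the multipart boundary itself.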

How to upload multiple files with multer-s3 in Nodejs

I am trying to upload multiple images with Nodejs, Expressjs and Multer-s3, but it's not working.
I have a model called Program, and the Program model has an array image attribute, but when I try to upload multiple images my req.file returns undefined.
Here is my model
const programSchema = new mongoose.Schema({
  programtype: {
    type: String,
    required: true,
  },
  title: {
    type: String,
    required: true,
  },
  description: {
    type: String,
    required: true,
  },
  createdAt: {
    type: Date,
    required: true,
    default: Date.now,
  },
  programImage: {
    type: Array,
    required: true,
  },
});
and my routes
const Program = require("../models/program");
const fs = require("fs");
const multer = require("multer");
const path = require("path");
var AWS = require("aws-sdk");
var multerS3 = require("multer-s3");

AWS.config.update({
  secretAccessKey: process.env.S3_SECRECT,
  accessKeyId: process.env.AWS_ACCESS_KEY,
  region: process.env.S3_REGION,
});

const uploadPath = path.join("public", Program.programImageBasePath);
const imageMineTypes = ["image/jpeg", "image/png", "image/gif"];
const bucketname = "mybucketname";
s3 = new AWS.S3();

const upload = multer({
  storage: multerS3({
    s3: s3,
    acl: "public-read",
    bucket: bucketname,
    s3BucketEndpoint: true,
    endpoint: "http://" + bucketname + ".s3.amazonaws.com",
    key: function (req, file, cb) {
      const uploadPathWithOriginalName = uploadPath + "/" + file.originalname;
      cb(null, uploadPathWithOriginalName);
    },
  }),
});

router.post("/create", upload.array("cover", 10), async (req, res, next) => {
  console.log(req.file);
  const program = new Program({
    programtype: req.body.programtype,
    title: req.body.title,
    description: req.body.description,
    programImage: req.file.location,
  });
  try {
    const programs = await program.save();
    res.redirect("/programs");
  } catch {
    if (program.programImage != null) {
      removeprogramImage(program.programImage);
    }
    res.render("programs/new");
  }
});
and my views
<h2 style="padding-top: 90px;">New Programs</h2>
<form action="/programs/create" method="POST" enctype="multipart/form-data">
  <div>
    <label>Image</label>
    <input type="file" name="cover" multiple />
  </div>
  Cancel
  <button type="submit">Create</button>
</form>
You can refer to this example.
const s3 = new AWS.S3({
  accessKeyId: 'xxxxxxxxx',
  secretAccessKey: 'xxxxxxxxx'
});

const uploadS3 = multer({
  storage: multerS3({
    s3: s3,
    acl: 'public-read',
    bucket: 'xxxxxxxx',
    metadata: (req, file, callBack) => {
      callBack(null, { fieldName: file.fieldname })
    },
    key: (req, file, callBack) => {
      // If you want to save into a folder, concat the name of the folder to the path
      var fullPath = 'products/' + file.originalname;
      callBack(null, fullPath)
    }
  }),
  limits: { fileSize: 2000000 }, // In bytes: 2000000 bytes = 2 MB
  fileFilter: function (req, file, cb) {
    checkFileType(file, cb);
  }
}).array('photos', 10);

exports.uploadProductsImages = async (req, res) => {
  uploadS3(req, res, (error) => {
    console.log('files', req.files);
    if (error) {
      console.log('errors', error);
      res.status(500).json({
        status: 'fail',
        error: error
      });
    } else {
      // If file not found
      if (req.files === undefined) {
        console.log('uploadProductsImages Error: No File Selected!');
        res.status(500).json({
          status: 'fail',
          message: 'Error: No File Selected'
        });
      } else {
        // If success
        let fileArray = req.files,
          fileLocation;
        const images = [];
        for (let i = 0; i < fileArray.length; i++) {
          fileLocation = fileArray[i].location;
          console.log('filenm', fileLocation);
          images.push(fileLocation)
        }
        // Save the file name into database
        return res.status(200).json({
          status: 'ok',
          filesArray: fileArray,
          locationArray: images
        });
      }
    }
  })
};
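Relating that back to the original route: upload.array("cover", 10) populates req.files (an array), not req.file, and with multer-s3 each element carries a location property, so the programImage array could be filled roughly like this (a sketch against the question's own model and route):

router.post("/create", upload.array("cover", 10), async (req, res, next) => {
  console.log(req.files); // note: req.files, not req.file, when using upload.array()

  const program = new Program({
    programtype: req.body.programtype,
    title: req.body.title,
    description: req.body.description,
    // one S3 URL per uploaded file
    programImage: (req.files || []).map((file) => file.location),
  });

  try {
    await program.save();
    res.redirect("/programs");
  } catch {
    res.render("programs/new");
  }
});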

multers3 skipping shouldTransform

I am trying to crop images before they get sent to the S3 bucket. My issue is that in my multerS3 options, shouldTransform is being skipped, hence the transforms are not applied.
Here is my entire upload file:
require('multer-s3-transform');
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
const sharp = require('sharp');

aws.config.update({
  secretAccessKey: secretAccessKey,
  accessKeyId: accessKeyId,
  region: region,
});

const s3 = new aws.S3();

const fileFilter = (req, file, cb) => {
  if (file.mimetype === 'image/jpeg' || file.mimetype === 'image/png' || file.mimetype === 'image/jpg') {
    cb(null, true);
  } else {
    cb(new Error(message.FAIL.invalidImage), false);
  }
};

const upload = multer({
  fileFilter,
  storage: multerS3({
    s3,
    bucket: bucket,
    acl: 'public-read',
    shouldTransform: function (req, file, cb) {
      console.log('in should transform ');
      cb(null, true);
    },
    transforms: [
      {
        id: 'original',
        key: function (req, file, cb) {
          cb(null, Date.now().toString());
        },
        transform: function (req, file, cb) {
          console.log('og');
          cb(null, sharp().jpg())
        },
      },
      {
        id: 'resized',
        key: function (req, file, cb) {
          cb(null, Date.now().toString());
        },
        transform: function (req, file, cb) {
          console.log('thumbnail');
          cb(null, sharp().resize(300, 300).jpg())
        },
      }
    ],
    metadata: function (req, file, cb) {
      cb(null, { fieldName: 'some meta' });
    },
    key: function (req, file, cb) {
      cb(null, Date.now().toString());
    },
  })
});

module.exports = upload;
Here is my route
const photoUpload = upload.fields([{ name: 'photo', maxCount: 1 }]);

// Route for uploading photo image
app.post(routeRoot + '/upload/photo', function (req, res) {
  console.log('in route ');
  photoUpload(req, res, function (err) {
    if (err) {
      return res.status(200).send({ error: { message: err.message } });
    } else {
      account.uploadPhoto(req, res);
    }
  })
});
The result I get
files [Object: null prototype] {
  photo: [
    {
      fieldname: 'photo',
      originalname: '4.jpg',
      encoding: '7bit',
      mimetype: 'image/jpeg',
      size: 84154,
      bucket: '...',
      key: '...',
      acl: 'public-read',
      contentType: 'image/jpeg',
      contentDisposition: null,
      storageClass: 'STANDARD',
      serverSideEncryption: null,
      metadata: [Object],
      location: '...',
      etag: '...',
      versionId: undefined
    }
  ]
}
File upload to S3 works but it is not being transformed. I have been trying to figure this one out.
Thank you!
I've figured it out.
const multerS3 = require('multer-s3-transform');
instead of
const multerS3 = require('multer-s3');
For some reason the docs show the require for 'multer-s3' but not 'multer-s3-transform', which is what is needed for shouldTransform to work.
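As a follow-up, once multer-s3-transform is doing the upload, my understanding (worth verifying against the package version you use) is that each file in req.files gets a transforms array, one entry per configured transform, each with its own id, key and location, rather than a single top-level key. Reading the resulting URLs might then look roughly like this sketch:

// Sketch: assumes the 'original' and 'resized' transforms configured above
app.post(routeRoot + '/upload/photo', function (req, res) {
  photoUpload(req, res, function (err) {
    if (err) {
      return res.status(200).send({ error: { message: err.message } });
    }
    const photo = req.files.photo[0];
    const urls = {};
    // transforms may be absent if shouldTransform returned false
    for (const t of photo.transforms || []) {
      urls[t.id] = t.location; // e.g. urls.original, urls.resized
    }
    res.send({ urls });
  });
});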

Multer upload files into array of objects

I have an array of objects like this:
data: [
{"image": File, "certificate": File},
{"image": File, "certificate": File}
]
The approach I'm taking so the server can read the files is to append them one by one before sending the POST request:
...
const fd = new FormData();
fd.append('image', data[0].image);
fd.append('image', data[1].image);
fd.append('certificate', data[0].certificate);
fd.append('certificate', data[1].certificate);
this.http.post('http://localhost:3000/upload', fd, options);
Then on the server side I do:
upload.fields([{ name: 'image', maxCount: 2 }, { name: 'certificate', maxCount: 2 }])
So my question is: is there a way to send the array like
fd.append('data', data)
so that multer can pick up the files that are inside the data field?
For anyone still struggling with this, this is what worked for me
Backend Code:
const storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, path.join(__dirname, './images/'))
  },
  filename: function (req, file, cb) {
    cb(null, file.fieldname + '-' + Date.now() + file.originalname)
  }
});

const upload = multer({ storage: storage })

app.post('/generate', upload.fields([{ name: 'layer1', maxCount: 6 }, { name: 'layer2', maxCount: 6 }, { name: 'layer3', maxCount: 6 }]), function (req, res) {
  console.log(req.files)
})
Client request:
const formdata = new FormData();
var myHeaders = new Headers();
myHeaders.append("Accept", "application/json");

for (const layer of layers) {
  for (const img of layer.files)
    formdata.append(`layer${layer.id}`, img)
}

const requestOptions = {
  method: 'POST',
  headers: myHeaders,
  body: formdata,
  redirect: 'follow'
};

fetch("http://localhost:5000/generate", requestOptions)
