I am getting an error on submitting the form - node.js

While connecting to mongodb, I am getting a warning
"DeprecationWarning: current URL string parser is deprecated, and will
be removed in a future version. To use the new parser, pass option {
useNewUrlParser: true } to MongoClient.connect."
while submitting the form - I am getting
The database connection must be open to store files
at GridFSStorage._handleFile (C:\Users\charan puli\Desktop\upload\node_modules\multer-gridfs-storage\lib\gridfs.js:341:17)
at C:\Users\charan puli\Desktop\upload\node_modules\multer\lib\make-middleware.js:144:17
at allowAll (C:\Users\charan puli\Desktop\upload\node_modules\multer\index.js:8:3)
at wrappedFileFilter (C:\Users\charan puli\Desktop\upload\node_modules\multer\index.js:44:7)
at Busboy. (C:\Users\charan puli\Desktop\upload\node_modules\multer\lib\make-middleware.js:114:7)
at Busboy.emit (events.js:182:13)
//////////////////app.js///////////////////////
// Middlewares
app.use(bodyparser.json())
app.use(methodOverride('_method'))
app.set("view engine", "ejs")

// Connection.
// NOTE: the original URI used '#' between the password and the host. The
// userinfo/host separator in a MongoDB connection string must be '@', and any
// reserved characters inside the password itself must be percent-encoded
// (e.g. '#' -> '%23'). An unparseable URI means no connection ever opens,
// which is why GridFSStorage raised "The database connection must be open
// to store files".
var mongoURI = 'mongodb+srv://user:password@clusterpuli-xs9yc.mongodb.net/test?retryWrites=true'
mongoose.connect(mongoURI, { useNewUrlParser: true })
  .then(() => { console.log('connected successfully'); })
  .catch(err => { console.log(err); })

var conn = mongoose.connection
var gfs
conn.once('open', () => {
  // gridfs-stream needs the raw driver db handle plus the mongo driver.
  gfs = Grid(conn.db, mongoose.mongo)
  gfs.collection('questions')
})

// Create the storage engine. It opens its own connection from the URI, so
// forward the same parser option to silence the deprecation warning there too.
var storage = new GridFsStorage({
  url: mongoURI,
  options: { useNewUrlParser: true },
  file: (req, file) => {
    // Must RETURN a promise that resolves with the file info.
    return new Promise((resolve, reject) => {
      // Random 16-byte hex name keeps uploaded filenames collision-free.
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err)
        }
        const filename = buf.toString('hex') + path.extname(file.originalname)
        const fileInfo = {
          filename: filename,
          bucketName: 'questions'
        }
        resolve(fileInfo)
      })
    })
  }
})
const upload = multer({ storage })

// #route /upload POST — responds with the stored file's metadata as JSON.
app.post('/upload', upload.single('file'), (req, res) => {
  res.json({ 'file': req.file })
})

var port = 3000
app.get("/", (req, res) => {
  res.render('index')
})
app.listen(port, () => {
  console.log(`app is running at ${port}`);
})
////////////////////////////////////
expected - json file object, connected Successfully
actual - database connection must be open

This should work for you, I have tested it:
//connection
// '@' (not '#') must separate the credentials from the host; percent-encode
// any reserved characters that appear inside the password itself.
const mongoURI = 'mongodb+srv://user:password@clusterpuli-xs9yc.mongodb.net/test?retryWrites=true';
const promise = mongoose.connect(mongoURI, { useNewUrlParser: true });
const conn = mongoose.connection;
let gfs;
conn.once('open', () => {
  // gridfs-stream expects the native driver db handle (conn.db), not the
  // mongoose Connection object itself.
  gfs = Grid(conn.db, mongoose.mongo);
  gfs.collection('questions');
});
//create storage object
// Passing the pending connect promise via `db` makes the storage engine wait
// for the shared connection instead of racing it with a second one of its own.
const storage = new GridFsStorage({
  db: promise,
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err);
        }
        const filename = buf.toString('hex') + path.extname(file.originalname);
        const fileInfo = {
          filename: filename,
          bucketName: 'questions'
        };
        resolve(fileInfo);
      });
    });
  }
});
const upload = multer({ storage });
See this reference here for further information:
Multer's GridFS storage engine
It has to do with the new url parser of the new mongo client of the Node.js MongoDB Driver API. See this reference: MongoClient
I had the same issue that's why I ended up on your question.

//connection
// Connect through mongoose itself so that mongoose.connection's 'open' event
// actually fires — connecting with a separate MongoClient (as originally
// written, and with an undefined `db` variable) never opens this connection.
// The '@' separator and the percent-encoded '#' ('%23') keep the URI parseable.
var mongoURI = 'mongodb+srv://charanpuli:Charan%231999@clusterpuli-xs9yc.mongodb.net/test?retryWrites=true'
var conn = mongoose.connection
mongoose.connect(mongoURI, { useNewUrlParser: true })
  .then(() => { console.log('connected successfully'); })
  .catch(err => { console.log(err); })
var gfs
conn.once('open', () => {
  gfs = Grid(conn.db, mongoose.mongo)
  gfs.collection('questions')
})

Related

Multer GridFs Storage Associate req.user to metadata

I can upload a file to my MongoDB using multer-gridfs-storage, but I am having trouble associating the file with user, what i want is to be able to pass req.user which i get from passportjs to the file metadata
// Create mongo connection
const conn = mongoose.createConnection(mongoURI);
// Init gfs
let gfs;
conn.once('open', () => {
  gfs = Grid(conn.db, mongoose.mongo);
  gfs.collection('uploads');
});
// Create storage engine.
// The `file` callback must RETURN a promise — without the `new Promise`
// wrapper, `resolve`/`reject` are undefined and the engine never receives
// the file info.
const storage = new GridFsStorage({
  url: mongoURI,
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err);
        }
        const filename = buf.toString('hex') +
          path.extname(file.originalname);
        const fileInfo = {
          filename: filename,
          bucketName: 'uploads',
          metadata: {
            user: req.user // attached by passport before multer runs
          }
        };
        resolve(fileInfo);
      });
    });
  },
});
const upload = multer({ storage });
You should implement like this:
// Create mongo connection
const conn = mongoose.createConnection(mongoURI);
// Init gfs
let gfs;
conn.once('open', () => {
  gfs = Grid(conn.db, mongoose.mongo);
  gfs.collection('uploads');
});
// Create storage engine.
// Wrap the callback body in a returned Promise: `resolve`/`reject` only exist
// as the executor's parameters, and multer-gridfs-storage awaits the result.
const storage = new GridFsStorage({
  url: mongoURI,
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err);
        }
        const filename = buf.toString('hex') +
          path.extname(file.originalname);
        const user = req.user; // populated by passport before multer runs
        const fileInfo = {
          filename: filename,
          bucketName: 'uploads',
          metadata: user
        };
        resolve(fileInfo);
      });
    });
  },
});
const upload = multer({ storage });

Express Router - uploading to S3 and MongoDb

I'm trying to do the following in Node.js using express router, Multer-S3, Multer, AWS and Mongodb.
I want to:
1: Check if filetype is image, price is number etc (some kind of quality check)
2: If above true, upload image to S3 to get Image url
3: If Image Url was generated, upload to Mongodb, including the generated image url..
Trying with below code but can only get one of these to work at same time..
const express = require("express");
const router = express.Router();
const shopController = require("../controllers/shop");
router.post(
"/shop/create/:shopId",
shopController.creatingShop,
shopController.createShopItem
);
const ShopItem = require("../models/shopitem"); //Mongoose Schema
const multer = require("multer");
const fileview = multer().single("file1"); //Trying to use this to view file before uploading to S3
const uploader = require("../services/file-upload");
const singleUpload = uploader.single("file1"); //Using this to upload to S3
exports.createShopItem = (req, res, next) => {
fileview(req, res, function (err) {
const file = req.file;
const title = req.body.title;
const price = req.body.price;
const description = req.body.description;
const location = req.body.location;
const user = "OrreSnorre";
if (
file.mimetype != "image/jpeg" &&
file.mimetype != "image/jpg" &&
file.mimetype != "image/png"
) {
return next(new Error("invalid file type"));
}
if (file.size > 2500000) {
return next(new Error("Your image is to big. Maximum 2.5mb"));
}
next();
console.log(
"Here I want to add upload text to mongoDb... including URL from S3 after it is generated"
);
});
exports.creatingShop = (req, res, next) => {
singleUpload(req, res, function (err) {
console.log(req.file);
// res.json({ "image-url": req.file.location });
});
next();
};
Anyone got ideas? Or examples that work?
Best regards,
Oscar
There are 2 ways to do this, either you can use only multer or multer-s3.
For simplicity, I will show you the way using only multer.
Flow of processing as follow:
Multer process and save to local
You read from local, and upload to S3 using the S3 SDK (you should also explore how to remove the file after upload, but I won't clutter this answer with that logic here)
If upload is successful, you retrieve the URL and pass it to your MongoDB.
// Make "temp" directory, as multer.diskStorage won't create the folder itself.
fs.mkdir('./temp', { recursive: true }, (err) => {
  if (err) throw err;
});
const PORT = parseInt(process.argv[2]) || parseInt(process.env.PORT) || 3000;
// Multer: save incoming files to ./temp under a timestamp-based name.
const storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, './temp');
  },
  filename: function (req, file, cb) {
    // Derive the extension from the mime type, e.g. "image/png" -> "png".
    let extArray = file.mimetype.split('/');
    let extension = extArray[extArray.length - 1];
    cb(null, new Date().getTime() + '.' + extension);
  },
});
const upload = multer({ storage: storage });
const endpoint = new AWS.Endpoint(AWS_S3_HOSTNAME);
const s3 = new AWS.S3({
  endpoint,
  accessKeyId: AWS_S3_ACCESSKEY_ID,
  secretAccessKey: AWS_S3_SECRET_ACCESSKEY,
});
// Promisified read of the locally saved upload.
// (The original paste nested putObject inside readFile's argument list and
// never closed readFile; the two helpers are separate declarations.)
const readFile = (path) =>
  new Promise((resolve, reject) =>
    fs.readFile(path, (err, buff) => {
      if (null != err) reject(err);
      else resolve(buff);
    })
  );
// Upload the buffer to AWS S3 here; resolves with the stored key (filename).
const putObject = (file, buff, s3) =>
  new Promise((resolve, reject) => {
    const params = {
      Bucket: AWS_S3_BUCKET_NAME,
      Key: file.filename,
      Body: buff,
      ACL: 'public-read',
      ContentType: file.mimetype,
      ContentLength: file.size,
    };
    s3.putObject(params, (err, result) => {
      if (null != err) reject(err);
      else resolve(file.filename);
    });
  });
const mongoClient = new MongoClient(MONGO_URL, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});
// Read local file -> upload to S3 -> insert document (with URL) into MongoDB.
app.post('/api/post', upload.single('imageFile'), async (req, res) => {
  // Fields accompanying the upload; they were referenced below but never declared.
  const { comments, title } = req.body;
  readFile(req.file.path)
    .then((buff) =>
      // Insert Image to S3 upon succesful read
      putObject(req.file, buff, s3)
    )
    .then((results) => {
      // build url of the resource upon successful insertion
      const resourceURL = `https://${AWS_S3_BUCKET_NAME}.${AWS_S3_HOSTNAME}/${results}`;
      const doc = {
        comments,
        title,
        ts: new Date(),
        image: resourceURL, // Your URL reference to image here
      };
      // Insert to your mongoDB
      mongoClient
        .db(MONGO_DB)
        .collection(MONGO_COLLECTION)
        .insertOne(doc)
        .then((results) => {
          // delete the temp file when no error from MONGO & AWS S3
          fs.unlink(req.file.path, () => {});
          // return the inserted object
          res.status(200).json(results.ops[0]);
        })
        .catch((error) => {
          console.error('Mongo insert error: ', error);
          res.status(500);
          res.json({ error });
        });
    })
    .catch((error) => {
      console.error('insert error: ', error);
      res.status(500);
      res.json({ error });
    });
}); // closing of app.post — missing in the original paste

Chunk uploading with Filepond to NodeJS running Express

I need to upload large files (2GB+) using Filepond to a NodeJS server running Express. These files need to be streamed (because of their size) to two locations...a local folder and a remote folder.
My research led me to a solution using tus-node-server https://github.com/pqina/filepond/issues/48
I've implemented the solution as seen below but I don't know how to:
1. Change the name of the file on upload (it uses a UID, I'd like to use the filename from the "metadata" property)
2. Execute a function on the server during or after the upload.
Any help?
Client.js
// Configure FilePond to hand each added file to a tus resumable-upload client.
Filepond.setOptions({
server: {
// Custom process function, invoked by FilePond once per file.
process: (fieldName, file, metadata, load, error, progress, abort) => {
var upload = new tus.Upload(file, {
endpoint: `/upload`,
retryDelays: [0, 1000, 3000, 5000],
metadata: {
filename: file.name,
filetype: file.type
},
onError: function(err) {
// Report the failure back to FilePond so the UI shows the error state.
console.log("Failed because: " + err);
error(err)
},
onProgress: function(bytesUploaded, bytesTotal) {
// Forward tus progress into FilePond's progress indicator.
progress(true, bytesUploaded, bytesTotal)
},
onSuccess: function() {
// Hand the server-side id (last URL segment) back to FilePond as the file id.
load(upload.url.split('/').pop())
}
})
// Start the upload
upload.start();
return {
abort: () => {
// FilePond requested cancellation: abort the tus upload, then confirm.
upload.abort()
abort()
}
}
}
}
})
Server.js
// Express app serving static files plus a tus resumable-upload endpoint.
const express = require('express');
const app = express();
const uploadApp = express();
const Client = require('ftp');
const path = require('path');
const tus = require('tus-node-server');
var tusServer = new tus.Server();
// Store uploaded chunks on disk under /public/files (tus names them by UID).
tusServer.datastore = new tus.FileStore({
path: '/public/files'
});
// Every verb/path under /upload is delegated to the tus server.
uploadApp.all('*', tusServer.handle.bind(tusServer));
app.use('/public', express.static(path.join(__dirname, "/public")));
app.use('/upload', uploadApp);
app.listen(3000);
Before trying this method I was using app.post to receive the upload and perform further action.
// Receive an upload, save it locally, then mirror it to the FTP server.
app.post('/upload', (req, res, next) => {
  let uploadFile = req.files.file,
    fileName = req.files.file.name,
    localPath = `${__dirname}/public/files/${fileName}`,
    ftp = new Client();
  uploadFile.mv(localPath, err => {
    if (err) { return res.status(500).send(err) };
    res.json({ file: `public/${req.files.file.name}` })
    // Only connect once the file exists on disk — the original connected
    // immediately, so the FTP 'ready' handler could race the async mv().
    ftp.connect(ftpOptions)
  });
  ftp.on('ready', () => {
    // Read from the full local path; a bare fileName resolved against the
    // process CWD, where the uploaded file does not exist.
    ftp.put(localPath, `/ftppath/${fileName}`, err => {
      if (err) throw err;
      ftp.end();
    })
  })
});

How to upload image from jimp with multer in nodejs

I sent a request from the client with an image link in the body, in the node server I use jimp (Jimp NPM) to resize the image which I was sent, then I need to save the image to mongodb with multer but I have some confuse with this step, does anyone have any idea ? thanks !
here is my code so far :
const multer = require('multer');
const GridFsStorage = require('multer-gridfs-storage');
const mongoose = require('mongoose');
// Resize the image referenced by req.body.imagePath, then (intended) store the
// result in GridFS via multer.
// NOTE(review): this snippet is incomplete — `app`, `crypto` and `Jimp` are
// not in scope in this module, and `upload` is created but never invoked, so
// nothing is actually written to MongoDB yet.
exports.resizeImage = async function (req, res) {
let storageFS = new GridFsStorage({
db: app.get("mongodb"), // NOTE(review): `app` is undefined here — confirm how the db handle is injected
file: (req, file) => {
return new Promise((resolve, reject) => {
crypto.randomBytes(16, (err, buf) => {
if (err) {
return reject(err);
}
const filename = file.originalname;
const fileInfo = {
filename: filename,
bucketName: 'images'
};
resolve(fileInfo);
});
});
}
});
var upload = multer({ storage: storageFS }).single('image');
try {
Jimp.read(req.body.imagePath)
.then(image => {
// NOTE(review): jimp's resize callback receives (err, image); persisting it
// would mean writing img.getBuffer(...) into a GridFS write stream, or
// invoking `upload(req, res, cb)` with a real multipart request — TODO decide.
image.resize(150, 150, function (err, img) {
// How to store img to mongodb with multer here ?
});
})
.catch(err => {
throw err
});
} catch (error) {
res.send(error);
}
}

how to download a file saved in gridFS using nodeJS

I need to download a resume from GridFS. Below is the code I've written to do it, but it only renders the file's contents in the browser instead of downloading a physical file. How can I force the file to download?
exports.getFileById = function(req, res){
var conn = mongoose.connection;
var gfs = Grid(conn.db, mongoose.mongo);
var id = req.params.ID;
gfs.exist({_id: id,root: 'resume'}, function (err, found) {
if (err) return handleError(err);
if (!found)
return res.send('Error on the database looking for the file.');
gfs.createReadStream({_id: id,root: 'resume'}).pipe(res);
});
};
Hope this helps!
// Download (rather than inline-render) a resume stored in GridFS.
// <resumeId> / <collectionName> are placeholders to be filled in by the reader.
exports.downloadResume = function(req, res){
var conn = mongoose.connection;
var gfs = Grid(conn.db, mongoose.mongo);
gfs.findOne({ _id: <resumeId>, root: <collectionName> }, function (err, file) {
if (err) {
return res.status(400).send(err);
}
else if (!file) {
return res.status(404).send('Error on the database looking for the file.');
}
// Content-Disposition: attachment is what makes the browser download the
// stream instead of displaying it inline.
res.set('Content-Type', file.contentType);
res.set('Content-Disposition', 'attachment; filename="' + file.filename + '"');
var readstream = gfs.createReadStream({
_id: <resumeId>,
root: '<collectionName>'
});
readstream.on("error", function(err) {
// Terminate the response if the stream fails mid-transfer.
res.end();
});
readstream.pipe(res);
});
};
I took hints from accepted answer. But I had to jump through some hoops to get it working hope this helps.
const mongodb = require('mongodb');
const mongoose = require('mongoose');
const Grid = require('gridfs-stream');
// Patch gridfs-stream for mongodb driver >= 3, whose cursors renamed
// nextObject() to next(). The eval runs over a fixed local string (not user
// input), but pinning a compatible gridfs-stream version would be safer.
eval(`Grid.prototype.findOne = ${Grid.prototype.findOne.toString().replace('nextObject', 'next')}`);
const mongoURI = config.mongoURI;
const connection = mongoose.createConnection(mongoURI);
app.get('/download', async (req, res) => {
  var id = "<file_id_xyz>";
  // Declare gfs locally — the original assignment created an implicit global.
  const gfs = Grid(connection.db, mongoose.mongo);
  gfs.collection("<name_of_collection>").findOne({ "_id": mongodb.ObjectId(id) }, (err, file) => {
    if (err) {
      // report the error
      console.log(err);
    } else {
      // detect the content type and set the appropriate response headers.
      let mimeType = file.contentType;
      if (!mimeType) {
        mimeType = mime.lookup(file.filename); // requires: const mime = require('mime-types')
      }
      res.set({
        'Content-Type': mimeType,
        'Content-Disposition': 'attachment; filename=' + file.filename
      });
      const readStream = gfs.createReadStream({
        _id: id
      });
      readStream.on('error', err => {
        // report stream error
        console.log(err);
      });
      // the response will be the file itself.
      readStream.pipe(res);
    }
  });
}); // closing of app.get — missing in the original paste

Resources