I am using multer to upload images to the server . And i am using sharp to resize the images and then send to the client. But what happens is once a user uploads the image, multer uploads it to the server and sharp resizes it even but if the same user uploads again multer uploads the new file but sharp doesnt resize the new uploaded image rather it sends the original resized image.
app.post('/upload',(req,res)=>{
upload(req,res,(err)=>{
if(err){
res.render('picupload',{
msg: err
});
}else{
if(req.file == undefined){
res.render('picupload',{
msg: 'Error: No File selected!'
});
} else{
console.log(req.file);
sharp(`./public/uploads/${req.file.filename}`)
.resize(40,40)
.toFile(`./public/uploads/resize/${req.file.filename}`,(err,info)=>{
if(err) throw err;
console.log(info);
});
res.render(pathjoin13,{
file: `uploads/${req.file.filename}` ,
file1: `uploads/resize/${req.file.filename}`
});
}
}
});
});
The res.render() call depends on the output of the asynchronous toFile() call so will need to move to its callback, something like:
.toFile(`./public/uploads/resize/${req.file.filename}`,(err,info)=>{
if (err) throw err;
console.log(info);
res.render(pathjoin13,{
file: `uploads/${req.file.filename}` ,
file1: `uploads/resize/${req.file.filename}`
});
});
Related
It is my first time of trying to use cloudinary and multer to upload and store images for my blog application.
Initially, I was storing the images locally using multer and saved the image url in mongodb. Now, I store the images in cloudinary and save the url in database.
I noticed that the cloudinary option is a bit slower compared to saving locally. Is there a workaround this?
Here is my nodecode:
app.post("/api/v1/upload", upload.single("file"), async (req, res) =>{
try {
const fileStr = req.file.path
if(!fileStr){
return res.status(500).json( 'No image found');
}
const uploadResponse = await cloudinary.uploader.upload(fileStr, {
upload_preset: 'nodeblog',
});
fs.unlinkSync(fileStr)
const result = {
url: uploadResponse.secure_url,
publicId: uploadResponse.public_id
}
return res.status(200).json(result)
} catch (err) {
console.error(err);
return res.status(500).json({ err: 'Something went wrong' });
}
});
After image has uploaded successfully, I simply deleted it locally.
I have created a function to upload image to a custom directory https://sailsjs.com/documentation/concepts/file-uploads .
code:
req.file('avatar').upload({
dirname: require('path').resolve(sails.config.appPath, 'assets/images')
},function (err, uploadedFiles) {
if (err) return res.negotiate(err);
return res.json({
message: uploadedFiles.length + ' file(s) uploaded successfully!'
});
});
how can I upload same file into 2 different paths.
the code I have tried:
var imgPathArr;
var dePathWeb = 'd:/images/web';
var dePath = 'd:/images/mobile';
imgPathArr.push(dePathWeb);
imgPathArr.push(dePath);
req.file('file').upload({
dirname: require('path').resolve(imgPathArr[0])
},function (err, uploadedFiles) {
if (err) return res.send(500, err);
console.log("uploadedFiles web " + uploadedFiles[0].fd);
req.file('file').upload({
dirname: require('path').resolve(imgPathArr[1])
},function (err, files) {
if (err) return res.send(500, err);
console.log("files mobile " + files[0].fd);
});
res.send("ok");
});
});
but each and every time the images are saved into a same path.
console:
uploadedFiles web c:\xampp\htdocs\meServer\images\secondNavi\000a073e-cd6c-4758-ab44-54d32ddfc20a.png
.
files mobile c:\xampp\htdocs\meServer\images\secondNavi\000a073e-cd6c-4758-ab44-54d32ddfc20a.png
why it always take the same path. the image is always only upload into the web directory. I'm defining the paths separately using array index.
Just use fsfrom node DOCS to copy this file.
Your code is really messy. Try to modulate that with a Controller + Service helper. Like ImageController dealing with req and res and ImageService to deal with everything else...
I want get content from my uploaded file. Uploaded file is just text file and i want read this file line by line
req.file("model").upload(function (err, uploadedFiles){
if (err) return res.serverError(err);
return res.json({
message: uploadedFiles.length + ' file(s) uploaded successfully!',
files: uploadedFiles,
content: uploadedFiles[0] // I want to get my uploaded file content
// Stream or buffer
});
});
You can get the file descriptor from uploadedFiles[0].fd
Use it to read / stream the file.
fs.readFile(uploadedFiles[0].fd, 'utf8', function (err,data) {
return res.json(200, {message: 'Ok', data: data});
})
First install Local filesystem streaming binary adapter for Sails.js / Waterline
npm install sails-local-fs
in your code create fs instance
var fs = require('fs');
And now use readFile() method to get content of your file
fs.readFile(files[0].fd, 'utf8', function (err,data) {
return res.json(200, {message: 'Ok', data: data});
})
In the current project I am working with sails js as back end and angular js as front end. I have implemented a cropping module. By current problem is that, the output of the cropping module is base64 data. For doing some manipulations I need to convert this base 64 data to an image file. I received the base64 data at server side. Now need to convert this data to a file in sails js server side. I used a code for that, but not creating the image file.
My sample base 64 data is below
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAMgAAADICAYAAACtWK6eAAAgAElEQVR4Xuy9B6wkeX4e9lVVd1Xn3P1yDpNz2J3N+e72juFODHfH......................."
my code for converting this base64 to image file is
var image = req.body.image.replace(/^data:image\/jpeg;base64,/, "");
var filePath = 'imagecontents/';
fileName = filePath +'abc.jpg';
mkdirp(path.join(__dirname, '../../..' + filePath), function (err) {
if (err) {
logger.log('error', err);
res.sendStatus(500);
throw err;
}
require("fs").writeFile(path.join(__dirname, '../../..' + fileName), image, 'base64', function (err) {
if (err) {
logger.log('error', err);
res.sendStatus(500);
throw err;
}
});
Plz help to rectify is there is any error in in this code
You can write the file using the below code:
var base64Data = data.replace(/^data:image\/png;base64,/, "");
require("fs").writeFile("out.png", base64Data, 'base64', function(err) {
console.log(err);
});
I have an express app, which works when I run it locally. The issue is when downloading a file which as saved in mongoDB using GridFS. When running it locally (I just do ./bin/www and go to localhost:3000), I can download the file. But when I run it remotely, I download an html file.
This is the route which handles the response:
router.get('/getfile',function(req,res) {
if (req.isAuthenticated())
{
var gfs = Grid(mongoose.connection, mongoose.mongo);
var id = req.query.id;
gfs.exist({_id: id}, function (err, found) {
if (err) return handleError(err);
if (!found)
res.send('Error on the database looking for the file.')
});
var readStream = gfs.createReadStream({
_id: id
}).pipe(res);
}
else
res.redirect('/login');
});
and that is called by this line in a jade file:
td #[a(href="getfile?id=#{log.videoId}" download="video") #[span(name='video').glyphicon.glyphicon-download]]
On the server, I'm doing:
/logApp$ export NODE_ENV=production
/logApp$ ./bin/www
the mongoDB deamon is running. In fact, I can query the database. And I'm not writing any file! I want to read it.
EDIT: I found the error message:
MongoError: file with id #### not opened for writing
You need to move the code that pipes the file to the response into the gfs.exist callback so that it runs after the exist check.
gfs.exist({ _id: id }, function(err, found) {
if (err) {
handleError(err);
return;
}
if (!found) {
res.send('Error on the database looking for the file.')
return;
}
// We only get here if the file actually exists, so pipe it to the response
gfs.createReadStream({ _id: id }).pipe(res);
});
Apparently you get that generic "not opened for writing" error if the file doesn't exist.