Sending image from server to use in EJS file - node.js

Making a basic blog with an admin section to learn the basics of node and express. I just implemented multer middleware to save images for a blog post to a folder ("images") on the server - not to mongo or an s3 bucket - keeping it simple for now to learn.
I am using EJS and res.render to render the frontend. However, I want to display the image in the EJS file as well. I've tried simply passing in the filename like so:
res.render(path.resolve(__dirname, "../", "views", "posts.ejs"), {postData, file});
postData is the data on the post from the MongoDB collection. All this does is send the filename itself, which is not helpful.
I've looked around but can't seem to find an answer to this. Or am I overthinking it?
Here is the rest of the code for the controller:
const path = require("path");
const fs = require('fs');
const Post = require('../models/modelPosts');

exports.getPost = (req, res, next) => {
  const postPath = req.params.post;
  Post.findOne({ postPath: postPath }, (error, postData) => {
    if (error) { return next(error) };
    if (postData.postReadyToView == true) {
      // find the correct image
      fs.readdirSync('./images').forEach(file => {
        const stringOfFile = JSON.stringify(file);
        const stringOfPathJPEG = JSON.stringify(postPath + ".jpeg");
        const stringOfPathJPG = JSON.stringify(postPath + ".jpg");
        const stringOfPathPNG = JSON.stringify(postPath + ".png")
        // send the ejs file and image
        if (stringOfFile == stringOfPathJPEG ||
            stringOfFile == stringOfPathJPG ||
            stringOfFile == stringOfPathPNG) {
          res.render(path.resolve(__dirname, "../", "views", "posts.ejs"), {
            postData, file
          });
        }
      })
    } else {
      res.redirect(404, "/404");
    }
  })
};

Send the image's file path to the page as render data, register the image folder (e.g. public/images) as a static folder using express.static, and reference that path in an img tag in the rendered page. I think that should work.
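A minimal sketch of that idea, assuming the upload folder is ./images as in the question (the /images URL prefix here is just illustrative):

// app setup – expose the multer upload folder as static files
const express = require('express');
const path = require('path');

const app = express();

// anything saved under ./images becomes reachable at /images/<filename>
app.use('/images', express.static(path.join(__dirname, 'images')));

The controller then only needs to pass the filename, exactly as in res.render(..., { postData, file }), and posts.ejs can reference it with <img src="/images/<%= file %>">, letting the browser fetch the image through the static route.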

Related

Stop image from downloading automatically when browsed via URL - show image in browser instead

app.get('/get/image/*', (req, res) => {
  const path = req.params[0];
  const fName = path.substring(path.lastIndexOf('/') + 1);
  res.setHeader('content-type', 'image/png');
  res.download(uploadBasePath + path, fName);
})
This downloads the images instead of displaying them in the browser. I want to display them in the browser, not trigger an automatic download. How can I fix it?
I'm not sure what exactly you're trying to do, but if you just want to display the image in the browser, there are several ways to do it. Here is one example:
// Send image url in response
app.get('/get/image/*', (req, res) => {
  const imgUrl = 'yourimgurlhere';
  res.send({ imageUrl: imgUrl })
})
And in your frontend code you refer to that url.
Example using ejs:
<img src="/<%= imageUrl %>" alt="My image is now displayed in the browser">
You can read the image into a buffer and then send the buffer to the client with the content type set to image/jpeg.
const fs = require('fs');
...
app.get('/get/image/*', (req, res) => {
  const path = req.params[0];
  const fName = path.substring(path.lastIndexOf('/') + 1);
  fs.readFile(uploadBasePath + fName, (err, buffer) => {
    if (err) {
      return res.status(500).send(err);
    }
    res.contentType('image/jpeg');
    res.send(buffer)
  })
})
This is really simple: just send the file in the response with Express.
const path = require('path')
res.sendFile(path.join(__dirname, "public",'filepath.jpg'));
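Applied to the route from the question, a sketch might look like this (uploadBasePath is assumed to be defined as in the original snippet; the traversal check is just a precaution added here, not part of the answer above):

const path = require('path');

app.get('/get/image/*', (req, res) => {
  const relPath = req.params[0];
  // resolve to an absolute path and make sure it stays inside the upload folder
  const absPath = path.resolve(uploadBasePath, relPath);
  if (!absPath.startsWith(path.resolve(uploadBasePath))) {
    return res.status(400).send('Invalid path');
  }
  // sendFile sets the Content-Type from the file extension and streams it inline
  res.sendFile(absPath);
});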

How to cache .jpg in memory for Express middleware?

I have this Express middleware to deliver thumbnails (.jpeg) on demand:
app.get('/thumb/:index', function(req, res){
  var index = req.params.index
  if (!index) return res.status(404).send()
  var jpeg = PATH.join('./thumbs/', index, '.jpg')
  res.status(200).sendFile(jpeg)
})
This answer shows how to do it when using .static, but I'm not sure how to use it in an app.get middleware.
The goal is to cache all jpeg thumbnails in memory to speed up responses and avoid hitting the physical disk all the time.
Here is a way that works and that I could come up with in less than 10 minutes.
The infamous use of a global variable is applicable here. The const cacheMap is a Map that will cache the binary content in memory using the index const as its key.
It is probably not the best implementation of all, but it is better than many possible solutions and it works.
Don't forget to remove/comment the console.log calls that are there just for you to see that the follow-up requests don't print the "reading file ..." string.
const path = require('path')
const fs = require('fs')
const app = require('express')()

const cacheMap = new Map()

app.get('/thumb/:index', async function (req, res) {
  const { index } = req.params
  if (!index) return res.status(404).send()
  const jpeg = path.join(__dirname, 'thumbs', `${index}.jpg`)
  if (!cacheMap.has(index)) {
    console.log('reading file', jpeg)
    cacheMap.set(index, await fs.promises.readFile(jpeg))
  }
  console.log({ index })
  return res
    .header('content-type', 'image/jpeg')
    .status(200)
    .send(cacheMap.get(index))
})
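One caveat with the snippet above, hinted at by "probably not the best implementation": the Map grows without bound. An illustrative way to cap it (the helper name and limit are made up, not part of the answer):

const MAX_CACHED = 100 // illustrative limit

function cacheSet(map, key, value) {
  // Map iterates keys in insertion order, so the first key is the oldest entry
  if (map.size >= MAX_CACHED) {
    map.delete(map.keys().next().value)
  }
  map.set(key, value)
}

// in the route above: cacheSet(cacheMap, index, await fs.promises.readFile(jpeg))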

Is it possible to use EJS without routes with nodeJS / Express

I'm running a NodeJS script that will generate several PDF reports.
Thing is, I need to generate several graphs for each PDF, so after several problems I decided to generate the graphs in PNG format, then make an HTML page including the images. From the HTML, I generate a PDF.
Thing is I don't really need routes, but I need EJS, and I need req / res to generate my graphs:
app.get("/operations/:operation/meters/:meter/weekly_report", async (req, res) => { // Used to generate PNG from graph
const meterId = req.params.meter;
const week = req.query.week;
// get meters from meter
const meter = meters.find(it => it.prm === meterId);
const weeklyData = await generateWeeklyGraphForPRM(meter, week);
ejs.renderFile(path.join(__dirname, './views/partials/', "weekly_graph.ejs"), {
days: weeklyData.days,
conso: weeklyData.consoByHour,
meterLabel: meter.label,
}, (err) => {
if (err) {
res.send(err);
} else {
res.render('partials/weekly_graph.ejs', {
days: weeklyData.days,
conso: weeklyData.consoByHour,
meterLabel: meter.label,
});
}
});
And Then:
async function makePngScreenshot(url, meterId, filename) {
  axios.get(url, null); // Make the request to generate html page
  const destination = "public/images/" + operation.data.name + "/" + DATE_INI + "_" + DATE_END + "/" + meterId
  return new Pageres({delay: 2, filename: filename})
    .src(url, ['1300x650'], {crop: true})
    .dest(destination)
    .run()
}
});
The thing is working, but right now everything is in index.js.
I am trying to break the code into several files.
As I extract the routes into a routes.js, I run into the problem that I can no longer share global vars with all my endpoints.
So here I see 3 solutions:
Use functions instead of endpoints: I don't need endpoints, but I don't know how to render an EJS file without routes and req / res.
In each route, fetch each object again (inefficient).
Use Redis or some other cache (OK, but I would like to avoid any extra component for now).
The easiest would be converting the routes into functions, but how can I generate EJS files without routes? Is it possible?
I hope I understand your task correctly. I made an example of a program that is started from the command line, receives the command-line arguments meterId and week, and generates an .html file from the .ejs template. I also used the yargs package to easily parse command-line arguments.
const path = require('path');
const fs = require('fs');
const argv = require('yargs').argv;
const ejs = require('ejs');

const fsp = fs.promises;

// It would be a good idea to store these parameters in an .env file
const INPUT_FILENAME = 'test.ejs';
const OUTPUT_FILENAME = 'result.html';
const TEMPLATE_FILE = path.resolve(__dirname, './templates', INPUT_FILENAME);
const STORAGE_PATH = path.resolve(__dirname, './storage', OUTPUT_FILENAME);

(async function main({ meterId, week }) {
  if (!meterId) {
    return console.warn('Specify the command line parameter "meterId"!');
  }
  if (!week) {
    return console.warn('Specify the command line parameter "week"!');
  }
  try {
    const html = await ejs.renderFile(TEMPLATE_FILE, { meterId, week }, { async: true });
    await fsp.writeFile(STORAGE_PATH, html);
    console.log('Done.');
  } catch (error) {
    console.error(error);
    process.exit(1);
  }
})(argv);
And an example command to run the script:
node script.js --meterId=141 --week=44
Please let me know if I understood your task correctly and if my example helps somehow.

How to download csv file using fast csv in node js

I want to export my data to a CSV file, so for that purpose I used fast-csv in Node.js. It's working fine. My code is:
var csv = require("fast-csv");

app.get('/file/exportToExcel', function(req, res) {
  var whereCondi = {
    'Id': req.query.id,
  }
  var response = {};
  table.find(whereCondi, function(err, tableData) {
    if (err) {
      response.status = 400;
      response.message = err.message;
      res.send(response);
    } else {
      var csvStream = csv.createWriteStream({headers: true}),
          writableStream = fs.createWriteStream("code.csv");
      writableStream.on("finish", function() {
      });
      csvStream.pipe(writableStream);
      for (var i = 0; i < tableData.length; i++) {
        csvStream.write({allcodes: tableData[i].code});
      }
      csvStream.end();
    }
  });
});
But the problem is it's saving the CSV file in my root folder. I want to download that CSV file when the user clicks on "export to Excel". Please help me.
writableStream = fs.createWriteStream("coupons.csv");
This looks to be your problem if I'm understanding you correctly. The current code saves the csv file relative to the app file (basically in the same directory in your case).
Try something like:
writableStream = fs.createWriteStream("./some/directory/coupons.csv");
You should create the CSV file in your directory and then delete it after the download, like this:
const express = require('express')
const objectstocsv = require('objects-to-csv')
const fs = require("fs")

const app = express()

var data = [
  {code: 'CA', name: 'California'},
  {code: 'TX', name: 'Texas'},
  {code: 'NY', name: 'New York'},
];

const PORT = process.env.PORT || 5000

app.get('/', async (req, res) => {
  const csv = new objectstocsv(data);
  // Save to file:
  await csv.toDisk('./test.csv');
  // Download the file
  res.download("./test.csv", () => {
    // Then delete the csv file in the callback
    fs.unlinkSync("./test.csv")
  })
})
Very late to the game, but wanted to add this in case other people were encountering the same hurdle. Not sure if this is an ideal solution since I just started learning, but I got around this problem by wrapping the CSV creation in an async function and having that function called when a user clicks on a button.
Essentially, user clicks button > GET request to specific path > export csv and render success page.
index.js or server.js file
const exportToCsv = () => {
  // ...some code to get data and generate csv...
};

app.get('/download', async (req, res) => {
  exportToCsv();
  res.render('<your desired view>');
});
view or html
<button type='submit' onclick='someNavFunction()'>Export</button>
The someNavFunction() can be a helper function that navigates to a new path, or some other solution that helps you hit the '/download' route you created in the server file.
This solution worked for me because I wanted to render a success page after download. You can add additional validation to only render if exported successfully etc.
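For completeness, someNavFunction can be as small as a redirect to the route (purely illustrative, not part of the answer above):

// in the view's <script> tag
function someNavFunction() {
  // navigating to /download triggers the GET route defined above
  window.location.href = '/download';
}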

Which is the better approach to upload images via nginx?

I want to implement file uploading via nginx and FileAPI on client side. So I have the following questions:
Which module is better suited for this task: nginx-upload-module, the native client_body_in_file_only, or something else?
How to check that the user is authenticated before uploading starts (maybe hit the backend and return some data back to nginx, like a user_id)?
How to rename the file to a hash so the URL looks like the following: www.mysite.com/files/011/b0f/639/011b0f639f69491e9e4cbaf41656297f.jpg ?
How to make and save three copies of the uploaded image at several sizes (128x128, 96x96, 30x30)?
Which module is better suited for this task: nginx-upload-module, the
native client_body_in_file_only, or something else?
I just had a look at nginx-upload-module, and this is a seven-year-old nginx module for handling multipart requests. Nginx has supported multipart uploads for years now, so you do not need to change anything in your nginx setup as long as you're not running a seven-year-old version!
For the remaining questions I'll give you an example, using:
thumbnail to generate the thumbnails (you will need GraphicsMagick installed, but you can replace it with any other lib)
Q to easily generate the different thumbnails concurrently and have clean code with no callback pyramid
Multer to handle the file upload server-side.
You could use other libraries to do the same thing, but this will show you one way to do it easily. For the sake of this example it is all in one single file.
var express = require('express');
var multer = require('multer');
var md5 = require('MD5');
var Q = require('q');
var Thumbnail = require('thumbnail');

var app = express();

var targetFolder = '/var/www/mysite/files/full/';
var thumbnailFolder = '/var/www/mysite/files/thumbs/';

var thumbnail = new Thumbnail(targetFolder, thumbnailFolder);
var makeThumbnail = Q.nbind(thumbnail.ensureThumbnail, thumbnail);

var thumbnailSizes = [30, 96, 128];

app.post('/upload', function(req, res, next) {
  // Check the authentication here, this is the first middleware.
  if (authenticated) {
    next();
  } else {
    res.send(401);
  }
}, multer({
  dest: targetFolder,
  rename: function (fieldname, filename) {
    // Rename the file with its name + date hash
    return md5(filename + Date.now());
  },
  onFileUploadComplete: function(file, req, res) {
    var resizePromises = [];
    for (var i in thumbnailSizes) {
      var p = makeThumbnail(file.name, thumbnailSizes[i], thumbnailSizes[i]);
      resizePromises.push(p);
    }
    Q.all(resizePromises).done(function() {
      // The file has been resized to the specified formats.
      res.send(200);
    }, function(err) {
      // Error!
      console.log(err);
      res.send(500);
    });
  }
}));
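The code above stores the renamed file flat inside targetFolder; the question also asked for nested hash paths like 011/b0f/639/<hash>.jpg. A minimal sketch of how one might derive and create such a path from the md5 hash (the helper name and slice lengths are illustrative, not part of the answer above):

var fs = require('fs');
var path = require('path');

// Turn "011b0f639f69..." into "<baseDir>/011/b0f/639/011b0f639f69....jpg"
function hashedPath(baseDir, hash, ext) {
  var dir = path.join(baseDir, hash.slice(0, 3), hash.slice(3, 6), hash.slice(6, 9));
  fs.mkdirSync(dir, { recursive: true }); // create the shard directories; needs Node >= 10.12
  return path.join(dir, hash + ext);
}

// e.g. hashedPath(targetFolder, md5(filename + Date.now()), '.jpg')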
