I have this Express middleware to deliver thumbnails (.jpeg) on demand:
app.get('/thumb/:index', function(req, res){
var index = req.params.index
if (!index) return res.status(404).send()
var jpeg = PATH.join(__dirname, 'thumbs', index + '.jpg')
res.status(200).sendFile(jpeg)
})
This answer shows how to do it when using .static, but I'm not sure how to do it in an app.get middleware.
The goal is to cache all JPEG thumbnails in memory to speed up the response and avoid hitting the physical disk every time.
Here is a way that works and that I came up with in less than 10 minutes.
The infamous global variable is applicable here. The const cacheMap is a Map that caches the binary content in memory, using the index value as its key.
It is probably not the best possible implementation, but it is better than many alternatives and it works.
Don't forget to remove/comment the console.log calls; they are there just for you to see that follow-up requests don't print the "reading file ..." string.
const path = require('path')
const fs = require('fs')
const app = require('express')()
const cacheMap = new Map()
app.get('/thumb/:index', async function (req, res) {
const { index } = req.params
if (!index) return res.status(404).send()
const jpeg = path.join(__dirname, 'thumbs', `${index}.jpg`)
if (!cacheMap.has(index)) {
console.log('reading file', jpeg)
cacheMap.set(index, await fs.promises.readFile(jpeg))
}
console.log({ index })
return res
.header('content-type', 'image/jpeg')
.status(200)
.send(cacheMap.get(index))
})
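One thing to keep in mind with this approach: the Map grows for as long as the process lives. A minimal sketch of a size cap, where MAX_CACHE_ENTRIES and cacheThumb are hypothetical names and not part of the answer above:
const MAX_CACHE_ENTRIES = 500 // hypothetical cap, tune it to your memory budget
function cacheThumb (index, buffer) {
  // Map keeps insertion order, so the first key is the oldest entry
  if (cacheMap.size >= MAX_CACHE_ENTRIES) {
    cacheMap.delete(cacheMap.keys().next().value)
  }
  cacheMap.set(index, buffer)
}
The route would then call cacheThumb(index, await fs.promises.readFile(jpeg)) instead of cacheMap.set(...).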
I am writing a Firebase function that exposes an API endpoint using express. When the endpoint is called, it needs to download an image from an external API and use that image to make a second API call. The second API call needs the image to be passed as a readableStream. Specifically, I am calling the pinFileToIPFS endpoint of the Pinata API.
My Firebase function is using axios to download the image and fs to write the image to /tmp. Then I am using fs to read the image, convert it to a readableStream and send it to Pinata.
A stripped-down version of my code looks like this:
const functions = require("firebase-functions");
const express = require("express");
const axios = require("axios");
const fs = require('fs-extra');
const cors = require('cors'); // cors() is used in the route below
require("dotenv").config();
const key = process.env.REACT_APP_PINATA_KEY;
const secret = process.env.REACT_APP_PINATA_SECRET;
const pinataSDK = require('@pinata/sdk');
const pinata = pinataSDK(key, secret);
const app = express();
const downloadFile = async (fileUrl, downloadFilePath) => {
try {
const response = await axios({
method: 'GET',
url: fileUrl,
responseType: 'stream',
});
// pipe the result stream into a file on disc
response.data.pipe(fs.createWriteStream(downloadFilePath, {flags:'w'}))
// return a promise and resolve when download finishes
return new Promise((resolve, reject) => {
response.data.on('end', () => {
resolve()
})
response.data.on('error', () => {
reject()
})
})
} catch (err) {
console.log('Failed to download image')
console.log(err);
throw new Error(err);
}
};
app.post('/pinata/pinFileToIPFS', cors(), async (req, res) => {
const id = req.query.id;
var url = '<URL of API endpoint to download the image>';
await fs.ensureDir('/tmp');
if (fs.existsSync('/tmp')) {
console.log('Folder: /tmp exists!')
} else {
console.log('Folder: /tmp does not exist!')
}
var filename = '/tmp/image-'+id+'.png';
downloadFile(url, filename);
if (fs.existsSync(filename)) {
console.log('File: ' + filename + ' exists!')
} else {
console.log('File: ' + filename + ' does not exist!')
}
var image = fs.createReadStream(filename);
const options = {
pinataOptions: {cidVersion: 1}
};
pinata.pinFileToIPFS(image, options).then((result) => {
console.log(JSON.stringify(result));
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Headers", "Authorization, Origin, X-Requested-With, Accept");
res.status(200).json(JSON.stringify(result));
res.send();
}).catch((err) => {
console.log('Failed to pin file');
console.log(err);
res.status(500).json(JSON.stringify(err));
res.send();
});
});
exports.api = functions.https.onRequest(app);
Interestingly, my debug messages tell me that the /tmp folder exists, but the downloaded file does not exist in the file system.
[Error: ENOENT: no such file or directory, open '/tmp/image-314502.png']. Note that the image can be accessed correctly when I manually access the URL of the image.
I've tried to download and save the file in many ways, but none of them work. Also, based on what I've read, Firebase Functions allow writing and reading temporary files in /tmp.
Any advice will be appreciated. Note that I am very new to NodeJS and to Firebase, so please excuse my basic code.
Many thanks!
I was not able to see that you are initializing the working directory as suggested in this post:
const bucket = gcs.bucket(object.bucket);
const filePath = object.name;
const fileName = filePath.split('/').pop();
const thumbFileName = 'thumb_' + fileName;
const workingDir = join(tmpdir(), `${object.name.split('/')[0]}/`);//new
const tmpFilePath = join(workingDir, fileName);
const tmpThumbPath = join(workingDir, thumbFileName);
await fs.ensureDir(workingDir);
Also, please consider that if you are using two functions, the /tmp directory is not shared, as each one has its own. Here is an explanation from Doug Stevenson. In the same answer there is a well-explained video about local and global scopes and how to use the tmp directory:
Cloud Functions only allows one function to run at a time in a particular server instance. Functions running in parallel run on different server instances, which have different /tmp spaces. Each function invocation runs in complete isolation from each other. You should always clean up files you write in /tmp so that they don't accumulate and cause a server instance to run out of memory over time.
I would suggest using Google Cloud Storage together with Cloud Functions to achieve your goal.
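A minimal sketch of that suggestion, assuming the default Firebase project bucket; saveToBucket, localPath and destination are illustrative names, not from the code above:
const admin = require('firebase-admin');
const fs = require('fs-extra');
admin.initializeApp();

async function saveToBucket(localPath, destination) {
  // Upload the temp file to the project's default Cloud Storage bucket
  await admin.storage().bucket().upload(localPath, { destination });
  // Clean up /tmp afterwards so the instance does not accumulate files
  await fs.remove(localPath);
}
Unlike /tmp, which is local to each server instance, the uploaded object stays available across function invocations.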
Making a basic blog with an admin section to learn the basics of node and express. I just implemented multer middleware to save images for a blog post to a folder ("images") on the server - not to mongo or an s3 bucket - keeping it simple for now to learn.
I am using EJS and res.render to render the frontend. However, I want to put the image in the EJS file as well. I've tried simply passing in the filename like so:
res.render(path.resolve(__dirname, "../", "views", "posts.ejs"), {postData, file});
postData is the data on the post from the MongoDB collection. All this does is send the filename itself, which is not helpful.
I've looked around but can't seem to find an answer to this; or am I overthinking it?
Here is the rest of the code for the controller:
const path = require("path");
const fs = require('fs');
const Post = require('../models/modelPosts');
exports.getPost = (req, res, next) => {
const postPath = req.params.post;
Post.findOne({ postPath: postPath }, (error, postData) => {
if (error) { return next(error) };
if (postData.postReadyToView == true) {
// find the correct image
fs.readdirSync('./images').forEach(file => {
const stringOfFile = JSON.stringify(file);
const stringOfPathJPEG = JSON.stringify(postPath + ".jpeg");
const stringOfPathJPG = JSON.stringify(postPath + ".jpg");
const stringOfPathPNG = JSON.stringify(postPath + ".png")
// send the ejs file and image
if (stringOfFile == stringOfPathJPEG ||
stringOfFile == stringOfPathJPG ||
stringOfFile == stringOfPathPNG) {
res.render(path.resolve(__dirname, "../", "views", "posts.ejs"), {
postData, file
});
}
})
} else {
res.redirect(404, "/404");
}
})
};
Send the image's file path to the page being rendered as data, register the image folder (e.g. public/images) as a static folder using express.static in Node.js, and load the image from that path in the rendered page. I think that will work.
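A minimal sketch of that suggestion, assuming the multer uploads land in an images folder next to the app entry point (adjust the paths to the real project layout):
const path = require('path');
const express = require('express');
const app = express();

// Register the upload folder as a static root: a request for /images/foo.jpg
// is served straight from ./images/foo.jpg, with no custom route needed
app.use('/images', express.static(path.join(__dirname, 'images')));
In posts.ejs the image can then be referenced with the file value that res.render already passes, for example <img src="/images/<%= file %>">, instead of sending the file contents yourself.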
I'm running a NodeJS script that will generate several PDF reports.
The thing is, I need to generate several graphs for each PDF, so after several problems I decided to generate the graphs in PNG format, then build the HTML page that includes the images. From the HTML, I generate a PDF.
The thing is, I don't really need routes, but I need EJS, and I currently need req / res to generate my graphs:
app.get("/operations/:operation/meters/:meter/weekly_report", async (req, res) => { // Used to generate PNG from graph
const meterId = req.params.meter;
const week = req.query.week;
// get meters from meter
const meter = meters.find(it => it.prm === meterId);
const weeklyData = await generateWeeklyGraphForPRM(meter, week);
ejs.renderFile(path.join(__dirname, './views/partials/', "weekly_graph.ejs"), {
days: weeklyData.days,
conso: weeklyData.consoByHour,
meterLabel: meter.label,
}, (err) => {
if (err) {
res.send(err);
} else {
res.render('partials/weekly_graph.ejs', {
days: weeklyData.days,
conso: weeklyData.consoByHour,
meterLabel: meter.label,
});
}
  });
});
And Then:
async function makePngScreenshot(url, meterId, filename) {
  axios.get(url, null); // Make the request to generate the HTML page
  const destination = "public/images/" + operation.data.name + "/" + DATE_INI + "_" + DATE_END + "/" + meterId;
  return new Pageres({delay: 2, filename: filename})
    .src(url, ['1300x650'], {crop: true})
    .dest(destination)
    .run();
}
This works, but right now everything is in index.js.
I am trying to break the code into several files.
As I extract each route into a routes.js, I run into the problem that I can no longer share global vars with all my endpoints.
So I see 3 solutions:
Use functions instead of endpoints: I don't need endpoints, but I don't know how to render an EJS file without routes and req / res.
In each route, fetch each object again (inefficient).
Use redis or any other cache (OK, but I would like to avoid any extra component for now).
The easiest one would be converting routes into functions, but how can I generate EJS files without routes? Is it possible?
I hope I understand your task correctly. I made an example of a program that is started from the command line, receives the command-line arguments meterId and week, and generates an .html file from the .ejs template. I also used the yargs package to easily parse the command-line arguments.
const path = require('path');
const fs = require('fs');
const argv = require('yargs').argv;
const ejs = require('ejs');
const fsp = fs.promises;
// It would be a good idea to store these parameters in an .env file
const INPUT_FILENAME = 'test.ejs';
const OUTPUT_FILENAME = 'result.html';
const TEMPLATE_FILE = path.resolve(__dirname, './templates', INPUT_FILENAME);
const STORAGE_PATH = path.resolve(__dirname, './storage', OUTPUT_FILENAME);
(async function main({ meterId, week }) {
if (!meterId) {
return console.warn('Specify the command line parameter "meterId"!');
}
if (!week) {
return console.warn('Specify the command line parameter "week"!');
}
try {
const html = await ejs.renderFile(TEMPLATE_FILE, { meterId, week }, { async: true });
await fsp.writeFile(STORAGE_PATH, html);
console.log('Done.');
} catch (error) {
console.error(error);
process.exit(1);
}
})(argv);
And an example command to run the script:
node script.js --meterId=141 --week=44
Please let me know if I understood your task correctly and if my example helps somehow.
I'm trying to consolidate a bunch of route usage throughout my Express API, and I'm hoping there's a way I can do something like this:
const app = express()
const get = {
fetchByHostname({
name
}) {
return `hey ${name}`
}
}
const map = {
'/public/hostname/:hostname': get.fetchByHostname
}
app.use((req, res, next) => {
const url = req.originalUrl
const args = { ...req.body, ...req.query }
const method = map[url] // this won't work
const result = method(args)
return res.json({
data: result
})
})
I'm trying to avoid passing around the req and res objects and just handle the response to the client in one place. Is there an Express/Node.js module, or a way to match the URL, like my map object above?
I don't really understand what you are trying to achieve, but from what I can see, your fetchByHostname({name}) could simply be fetchByHostname(name), and you should then be able to return `hey ${name}`. Also make sure you are running on ES6+, because of your args spread; otherwise, in ES5, you would have to define it explicitly as args = {body: req.body, query: req.query};. Hope it helps.
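As a small illustration of the ES6 features involved (this is only a sketch, not the poster's code):
// Parameter destructuring plus a template literal (ES6+)
const get = {
  fetchByHostname({ name }) {
    return `hey ${name}`
  }
}
console.log(get.fetchByHostname({ name: 'example.com' })) // "hey example.com"

// Object spread (as used in the question) also needs a modern runtime; in ES5
// you would write the object out explicitly instead, e.g.
// var args = { body: req.body, query: req.query }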
I want to implement file uploading via nginx and FileAPI on the client side. So I have the following questions:
Which module is better suited for this task: nginx-upload-module, the native client_body_in_file_only, or something else?
How to check that the user is authenticated before the upload starts (maybe hit the backend and have it return some data, like a user_id, back to nginx)?
How to rename the file to a hash so it looks like the following: www.mysite.com/files/011/b0f/639/011b0f639f69491e9e4cbaf41656297f.jpg ?
How to make and save three copies of the uploaded image in several sizes (128x128, 96x96, 30x30)?
Which module is better suited for this task: nginx-upload-module, the native client_body_in_file_only, or something else?
I just had a look at nginx-upload-module, and it is a 7-year-old nginx module for handling multipart requests. Nginx has supported multipart uploads natively for years now, so you do not need to change anything in your nginx setup, as long as you're not running a 7-year-old version!
For the remaining questions I'll give you an example using:
Thumbnail to generate the thumbnails (you will need GraphicsMagick installed, but you can replace it with any other lib)
Q to easily generate the different thumbnails concurrently and keep the code clean, with no callback pyramid
Multer to handle the file upload server-side.
You could use other libraries to do the same thing, but this will show you one way to do it easily. For the sake of this example it is all in one single file.
var express = require('express');
var multer = require('multer');
var md5 = require('MD5');
var Q = require('q');
var Thumbnail = require('thumbnail');
var app = express();
var targetFolder = '/var/www/mysite/files/full/';
var thumbnailFolder = '/var/www/mysite/files/thumbs/';
var thumbnail = new Thumbnail(targetFolder, thumbnailFolder);
var makeThumbnail = Q.nbind(thumbnail.ensureThumbnail,thumbnail);
var thumbnailSizes = [30,96,128];
app.post('/upload', function(req,res,next) {
//Check the authentication here, this is the first middleware.
if(authenticated) {
next();
} else {
res.send(401);
}
}, multer({
dest: targetFolder,
rename: function (fieldname, filename) {
//Rename the file with it's name + date hash
return md5(filename + Date.now());
},
onFileUploadComplete: function(file, req, res) {
var resizePromises = [];
for(var i in thumbnailSizes) {
var p = makeThumbnail(file.name,thumbnailSizes[i],thumbnailSizes[i]);
resizePromises.push(p);
}
Q.all(resizePromises).done(function(){
// The file has been resized to the specified format.
res.send(200);
},function(err) {
console.log(err);
res.send(500);
// Error !
});
}
}));
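One caveat: the rename and onFileUploadComplete options above come from the old multer 0.x API and were removed in multer 1.x. A rough sketch of the same flow on current multer versions (the 'file' form-field name is an assumption on my part, and the auth middleware is omitted for brevity):
var upload = multer({
  storage: multer.diskStorage({
    destination: targetFolder,
    filename: function (req, file, cb) {
      // Same idea as the old rename option: hash of the name plus the date
      cb(null, md5(file.originalname + Date.now()));
    }
  })
});

app.post('/upload', upload.single('file'), function (req, res) {
  // req.file.filename is the hashed name that was written to targetFolder
  var resizePromises = thumbnailSizes.map(function (size) {
    return makeThumbnail(req.file.filename, size, size);
  });
  Q.all(resizePromises).done(function () {
    res.sendStatus(200);
  }, function (err) {
    console.log(err);
    res.sendStatus(500);
  });
});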