Multiple fetch calls with one res.render: I am trying to perform multiple fetches and render the data with EJS, but my data is lost. Is there any fix?
app.get("/" , function(req,res){
fetch(`${domain}/popular`)
.then(res => res.text())
.then(datap => {
res.render('home',{datap : JSON.parse(datap)});})
fetch(`${domain}/recent-release`)
.then(res => res.text())
.then(datar => {
res.render('home',{datar : JSON.parse(datar)});})
})
I want to make multiple fetch calls, but the datar render is replacing the datap render.
Convert the function to async, await the two requests, and render only when they're both done. (I also took the liberty of using the asynchronous res.json() parser instead of res.text() + JSON.parse.)
app.get("/", async function(req, res) {
const res1 = await fetch(`${domain}/popular`);
const datap = await res1.json();
const res2 = await fetch(`${domain}/recent-release`);
const datar = await res2.json();
res.render("home", { datap, datar });
});
(A future optimization could be to run the two requests in parallel using Promise.all().)
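A minimal sketch of that parallel version, keeping the same domain and routes as above:

app.get("/", async function (req, res) {
  // start both requests immediately, then wait for both to finish
  const [datap, datar] = await Promise.all([
    fetch(`${domain}/popular`).then(r => r.json()),
    fetch(`${domain}/recent-release`).then(r => r.json())
  ]);
  res.render("home", { datap, datar });
});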
I was trying to solve it using inHTMLData() of the xss-filters library, like this answer, but it did not work because req.query is an object.
function data(req, res) {
  fetchData(req.params.uniqueId, req.params.name, req.query) // query has multiple data
    .then((result) => { res.json(result) })
    .catch((error) => res.json(error));
};
Using the xss-filters npm library:
1. First stringify the query object with JSON.stringify.
2. Pass the string through inHTMLData of xss-filters.
3. Convert it back with JSON.parse.
const xssFilters = require('xss-filters');

function data(req, res) {
  const uniqueId = xssFilters.inHTMLData(req.params.uniqueId);
  const name = xssFilters.inHTMLData(req.params.name);
  const query = xssFilters.inHTMLData(JSON.stringify(req.query));
  fetchData(uniqueId, name, JSON.parse(query)) // query has multiple data
    .then((result) => { res.json(result) })
    .catch((error) => res.json(error));
};
I have a problem with my Express JS app: when I try to call a function, the function is called endlessly. It opens a lot of Chromium browsers and causes performance issues.
I just want to call this function one time.
I've found a solution that makes it work (and the function is called just once), but this way I can't pass any parameters:
const farm = (async () => {
  const browser = await puppeteer.launch({ headless: true });
  const page = await browser.newPage();
  await page.goto("https://www.example.com/?s=" + term);
  await page.waitForSelector("div");
  const postLinks = await page.evaluate(() => {
    let postLinks = [];
    let elements = document.querySelectorAll('div.article');
    for (element of elements) {
      postLinks.push({
        title: element.querySelector('div.meta-info > h3 > a')?.textContent,
        url: element.querySelector('div.meta-info > h3 > a')?.href
      })
    }
    return postLinks;
  });
  console.log(postLinks);
  await browser.close();
})();

app.get('/', (req, res) => {
  var term = "Drake";
  res.send(farm);
});
With the code below I can pass parameters, but I can't return the result in res.send, and the function is called endlessly:
const farm = async (term) => {
  const browser = await puppeteer.launch({ headless: true });
  const page = await browser.newPage();
  await page.goto("https://www.example.com/?s=" + term);
  await page.waitForSelector("div");
  const postLinks = await page.evaluate(() => {
    let postLinks = [];
    let elements = document.querySelectorAll('div.article');
    for (element of elements) {
      postLinks.push({
        title: element.querySelector('div.meta-info > h3 > a')?.textContent,
        url: element.querySelector('div.meta-info > h3 > a')?.href
      })
    }
    return postLinks;
  });
  console.log(postLinks);
  await browser.close();
}

app.get('/', (req, res) => {
  var term = "Drake";
  var results = farm(term);
  res.send(results);
});
Did I miss something?
Thanks!
It's not an infinite loop but an unresolved promise. farm returns a promise that you're not waiting for; instead you send the pending promise before it resolves, i.e. before Puppeteer is done.
You need to wait for farm's promise to resolve: make the route handler async and add await to the farm call. Also make sure farm returns postLinks instead of only logging it, otherwise the awaited result will be undefined; a sketch of that follows the route below.
app.get('/', async (req, res) => {
  var term = "Drake";
  // farm returns a promise, so you need to wait for it to resolve, i.e. block execution here;
  // otherwise it just sends the pending promise, because Node.js runs in a non-blocking fashion
  var results = await farm(term);
  res.send(results);
});
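For completeness, a minimal sketch of farm with both changes applied: it accepts the search term and returns the scraped links, keeping the same selectors as in the question.

const farm = async (term) => {
  const browser = await puppeteer.launch({ headless: true });
  const page = await browser.newPage();
  await page.goto("https://www.example.com/?s=" + encodeURIComponent(term));
  await page.waitForSelector("div");
  const postLinks = await page.evaluate(() => {
    // collect a title/url pair for every article on the results page
    return Array.from(document.querySelectorAll('div.article'), element => ({
      title: element.querySelector('div.meta-info > h3 > a')?.textContent,
      url: element.querySelector('div.meta-info > h3 > a')?.href
    }));
  });
  await browser.close();
  return postLinks; // this is the value that `await farm(term)` resolves to in the route
};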
I failed to get a result from TinyURL within the POST handler and assign it to short_url for the response. console.log(short_url) shows "Promise { <pending> }". I tried an async/await function to retrieve the TinyURL result, but I'm sure I'm not using it right.
var express = require('express')
var TinyURL = require('tinyurl')

var app = express()
app.use(express.json())
app.use(express.static('public'))

app.get("/", function (req, res) {
  res.sendFile(__dirname + '/index.html')
})

app.post('/api/shorturl', (req, res) => {
  let original_url = req.body.url
  console.log(original_url) // this one shows correct URL from request body

  async function createShortURL(url) {
    await TinyURL.shorten(url, function (res) {
      console.log(res) // this one shows correct shortened URL
    })
  }

  let short_url = createShortURL(original_url)
  console.log(short_url) // this one shows "Promise { <pending> }"

  res.json({
    original_url: original_url,
    short_url: short_url
  })
})

var listener = app.listen(process.env.PORT || 3000, function () {
  console.log('Your app is listening on port ' + listener.address().port)
})
You're mixing async/await and callbacks. Don't do that. The tinyurl library provides a Promise version of the shorten method, so we can use async/await directly.
app.post('/api/shorturl', async (req, res) => {
  let original_url = req.body.url
  console.log(original_url) // this one shows correct URL from request body

  // just this
  let short_url = await TinyURL.shorten(original_url);
  console.log(short_url)

  res.json({
    original_url: original_url,
    short_url: short_url
  })
})
EDIT
If you're using callbacks, be aware of callback hell. It's one of the main reasons people prefer async/await.
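For comparison, a callback-only version of the same route would have to send the response from inside the callback, and the nesting grows as soon as more asynchronous steps are added:

app.post('/api/shorturl', (req, res) => {
  let original_url = req.body.url
  // TinyURL.shorten also accepts a callback that receives the shortened URL
  TinyURL.shorten(original_url, function (short_url) {
    res.json({
      original_url: original_url,
      short_url: short_url
    })
  })
})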
I have a problem: I need to read a CSV file from a web page developed using Express.
Basically I have
router.get('/metricas', function (req, res, next) {
  var datos = lee.leedatos();
  res.render('metricas', { page: 'Metricas', menuId: 'metricas', data: datos });
});
and lee.leedatos() is the following:
exports.leedatos = function () {
  var datos;
  const csv = require('fast-csv')
  const stream = fs.createReadStream('mywebsite/book1.csv');
  const streamCsv = csv({
    headers: true,
  })
    .on('data', data => {
      datos = data;
      //console.log(datos);
    })
    .on('finish', () => {
      console.log('finished')
      console.log(datos)
      return datos;
    })
  stream.pipe(streamCsv);
}
My problem is that the web page always returns before the file has been read at all, so data: datos is always empty.
How can I make the call synchronous?
Thanks!
You can use either callbacks or promises to track the execution of asynchronous code. This is how the code would look with promises:
router.get('/metricas', function (req, res, next) {
  lee.leedatos().then((datos) => {
    res.render('metricas', { page: 'Metricas', menuId: 'metricas', data: datos });
  });
});
exports.leedatos = function () {
  return new Promise((resolve, reject) => {
    var datos;
    const csv = require('fast-csv')
    const stream = fs.createReadStream('mywebsite/book1.csv');
    const streamCsv = csv({
      headers: true,
    }).on('data', data => {
      datos = data; // note: this keeps only the last row; push rows into an array if you need them all
      //console.log(datos);
    }).on('finish', () => {
      console.log('finished')
      console.log(datos)
      resolve(datos);
    })
    stream.pipe(streamCsv);
  })
}
Here is a link to a blog post that might help you understand asynchronous functions: https://blog.risingstack.com/node-hero-async-programming-in-node-js/
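Since leedatos() now returns a promise, the route could equally be written with async/await:

router.get('/metricas', async function (req, res, next) {
  // leedatos() resolves with the parsed data once the stream has finished
  const datos = await lee.leedatos();
  res.render('metricas', { page: 'Metricas', menuId: 'metricas', data: datos });
});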
I am working on an async problem. I'm making a web scraper, and after I scrape the site I need to put the data into my MongoDB database and then send it to the frontend. But since I loop over the elements, I can't put res.json() inside the loop, because that gives an error (you can only send a response once).
I'm stuck here. I've used promises before, but this is confusing.
router.get('/scrape', (req, res) => {
  request('http://www.nytimes.com', function test(error, response, html) {
    const $ = cheerio.load(html);
    // An empty array to save the data that we'll scrape
    const results = [];
    $('h2.story-heading, p.summary').each(function (i, element) {
      const link = $(element)
        .children()
        .attr('href');
      const title = $(element)
        .children()
        .text();
      const summary = $(element)
        .children()
        .text();
      const data = {
        title: title,
        link: link,
        summary: summary,
      };
      articles
        .create(data)
        .then((resp) => results.push(resp))
        // .then((resp) => Promise.resolve(results))
        // .then((jsonData) => res.json(jsonData)) // error: you can only send the response once
        .catch((err) => reject(err));
    });
    console.log(results); // empty array
    res.json(results) // empty
  });
});
My plan is:
to scrape a site (looping over the elements)
then save into MongoDB (push the data into an array)
then after the loop pass it to the frontend.
I need to put the query method (articles.create) inside the loop because I need each piece of data to have an id.
Instead of trying to accumulate results directly, you can map the elements contained in $('h2.story-heading, p.summary') to an array of promises, then aggregate with Promise.all(). The results you want will be delivered by Promise.all(...).then(...).
router.get('/scrape', (req, res) => {
  request('http://www.nytimes.com', function test(error, response, html) {
    const $ = cheerio.load(html);
    const promises = $('h2.story-heading, p.summary')
      .get() // as in jQuery, .get() unwraps Cheerio and returns an Array
      .map(function (element) { // this is Array.prototype.map()
        return articles.create({
          'title': $(element).children().text(),
          'link': $(element).children().attr('href'),
          'summary': $(element).children().text()
        })
        .catch(err => { // catch so any one failure doesn't scupper the whole scrape
          return {}; // on failure of articles.create(), inject some kind of default object (or string or whatever)
        });
      });
    // At this point, you have an array of promises, which need to be aggregated with Promise.all().
    Promise.all(promises)
      .then(results => { // Promise.all() should accept whatever promises are returned by articles.create()
        console.log(results);
        res.json(results);
      });
  });
});
If you want any single failure to scupper the whole scrape, then omit the catch() and add catch() to the Promise.all().then() chain, as sketched after the notes below.
Notes:
For .get() (and most other methods), the jQuery documentation is better than the Cheerio documentation (but be careful because Cheerio is a lean version of jQuery).
At no point do you need new Promise(). All the promises you need are returned by articles.create().
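That variant would look roughly like this (the 500 response is just one way to report the failure):

Promise.all(promises)
  .then(results => {
    res.json(results);
  })
  .catch(err => {
    // with no per-promise catch, any single failed articles.create() rejects the whole batch
    res.status(500).json({ error: err.message });
  });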
Something like this might work (code not tested)
router.get('/scrape', (req, res) => {
  request('http://www.nytimes.com', function test(error, response, html) {
    const $ = cheerio.load(html);
    // An empty array to save the data that we'll scrape
    const results = [];
    $('h2.story-heading, p.summary').each(function (i, element) {
      const link = $(element)
        .children()
        .attr('href');
      const title = $(element)
        .children()
        .text();
      const summary = $(element)
        .children()
        .text();
      const data = {
        title: title,
        link: link,
        summary: summary,
      };
      const articleCreate = articles.create(data);
      results.push(articleCreate);
    });
    console.log(results); // this is an array of promises
    Promise.all(results).then(allResults => {
      res.json(allResults)
    });
    // or you could use Array.prototype.reduce for sequential resolution instead of Promise.all (see the sketch below)
  });
});
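A rough sketch of that sequential alternative: collect the plain data objects first (a hypothetical datas array built inside .each() instead of results), then chain the inserts with Array.prototype.reduce so each create starts only after the previous one finishes.

const allResults = [];
const sequential = datas.reduce((chain, data) => {
  return chain
    .then(() => articles.create(data))
    .then(created => { allResults.push(created); });
}, Promise.resolve());

sequential.then(() => {
  res.json(allResults);
});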
Use the .map function to collect all the promises, pass them to Promise.all, and then return the results.
request('http://www.nytimes.com', function test(error, response, html) {
  const $ = cheerio.load(html);
  var summary = $('h2.story-heading, p.summary')
  Promise.all(summary.map((i, element) => {
    const data = {
      title: $(element).children().text(),
      link: $(element).children().attr('href'),
      summary: $(element).children().text(),
    };
    return articles
      .create(data)
  }).get())
    .then((result) => {
      console.log(result);
      res.json(result);
    });
})