How do I access the data array in my axios function - node.js

I'm trying to access the data array that has the information from the request, but I cannot seem to figure it out.
When I try to display it in my view, I get "Cannot read property 'length' of undefined".
const getData = () => {
    axios.get("https://www.worldometers.info/coronavirus/")
        .then(res => {
            const data = [];
            const $ = cheerio.load(res.data);
            $('.maincounter-number').each((index, element) => {
                const numberData = $(element).text();
                data[0] = {numberData: numberData};
            });
        }).catch(err => {
            console.log("Error fetching and parsing data: ", err);
        });
}
app.get("/", (req, res) => {
const data = getData();;
res.render('index', {title: 'Home', data: data});
});
Pug View
p #{data.dataNumbers}
I've also tried this function, but I get the same issue:
async function scrapeWorldOMeter(){
    try {
        const worldOMeterResponse = await axios.get("https://www.worldometers.info/coronavirus/");
        const data = [];
        const $ = cheerio.load(worldOMeterResponse.data);
        $('.maincounter-number').each((index, element) => {
            const numberData = $(element).text();
            data[0] = {numberData: numberData};
            return data[0];
        });
    }
    catch(err){
        throw new Error(`Can't scrape WorldOMeter ${err}`)
    }
}
app.get("/", async (req, res) => {
    const data = await scrapeWorldOMeter()
    res.render('index', {title: 'Home', data});
});

You can fix your first code snippet by using promises properly:
const getData = () => {
    return axios
        .get('https://www.worldometers.info/coronavirus/')
        .then((res) => {
            const data = [];
            const $ = cheerio.load(res.data);
            $('.maincounter-number').each((index, element) => {
                const numberData = $(element).text();
                data[0] = { numberData: numberData };
            });
            return data[0];
        })
        .catch((err) => {
            console.log('Error fetching and parsing data: ', err);
        });
};
app.get('/', (req, res) => {
    getData().then((data) => {
        res.render('index', { title: 'Home', data: data });
    });
});
The problem here was that you were not returning the axios promise from getData(), and you were not chaining on that promise with .then() in the route before rendering.
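One thing to keep in mind: the .catch inside getData only logs the error, so on a failed scrape the promise resolves to undefined and the template will hit the same "cannot read property of undefined" error again. A small, purely illustrative guard in the route (same getData as above):
app.get('/', (req, res) => {
    getData().then((data) => {
        // Fall back to an empty object so the Pug template doesn't blow up if the scrape failed
        res.render('index', { title: 'Home', data: data || {} });
    });
});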
Your second code snippet can be fixed by using async/await properly:
async function scrapeWorldOMeter() {
    try {
        const worldOMeterResponse = await axios.get(
            'https://www.worldometers.info/coronavirus/'
        );
        const data = [];
        const $ = cheerio.load(worldOMeterResponse.data);
        $('.maincounter-number').each((index, element) => {
            const numberData = $(element).text();
            data[0] = { numberData: numberData };
        });
        return data[0];
    } catch (err) {
        throw new Error(`Can't scrape WorldOMeter ${err}`);
    }
}
app.get("/", async (req, res) => {
    const data = await scrapeWorldOMeter();
    res.render('index', {title: 'Home', data});
});
The problem here was that you were returning data[0] from inside the jQuery-style .each() callback, so scrapeWorldOMeter itself never returned anything. You simply have to move the return to the line after the loop, as shown above.
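As a side note, .maincounter-number matches more than one element on that page, so you may want to collect every counter rather than only the first. A sketch of that variation (the function name and the Pug loop are just for illustration), using the same axios/cheerio setup as above:
async function scrapeAllCounters() {
    const response = await axios.get('https://www.worldometers.info/coronavirus/');
    const $ = cheerio.load(response.data);
    const counters = [];
    $('.maincounter-number').each((index, element) => {
        // Push every counter instead of overwriting index 0 each time
        counters.push($(element).text().trim());
    });
    return counters; // array of counter strings, in page order
}
In the route you would pass the array straight through (res.render('index', { title: 'Home', counters })) and loop over it in Pug with each counter in counters.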

Related

Parse res.text Node express

I am retrieving a CSV file from cloud-based storage as res.text and need to convert it to JSON.
I am wondering whether I should do the parsing in the return of fetchUrl or in the route (res.send)?
const fetchUrl = async () => {
    const URL_1 = 'https://file.csv'
    const res = await fetch(URL_1)
    return res.text()
}
router.get('/data', async (req, res, next) => {
    try {
        const getAllData = await fetchUrl();
        console.log(getAllData, 'fetching?');
        res.send(getAllData);
    } catch (err) {
        next(err);
        //res.send({ message: err })
        // res.status(404).send(err)
        console.log(err)
    }
})
I made use of a custom function that converts it to JSON in the second .then, so that I won't have to fetch on every call to the endpoint, since the data does not change.
Like so:
let getAllData
fetch('https://file.csv')
    .then(res => res.text())
    .then(data => {
        getAllData = csvToJSON(data)
        getAllData.forEach((item) => {
            item.startTime = new Date(item.startTime)
        })
    })
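For reference, csvToJSON above is a custom helper that isn't shown. A hypothetical sketch of such a converter, assuming a plain comma-separated file with a header row and no quoted fields:
function csvToJSON(text) {
    const [headerLine, ...lines] = text.trim().split('\n');
    const headers = headerLine.split(',').map((h) => h.trim());
    return lines.map((line) => {
        const values = line.split(',');
        // Build one object per row, keyed by the header names
        return headers.reduce((row, header, i) => {
            row[header] = values[i] !== undefined ? values[i].trim() : '';
            return row;
        }, {});
    });
}
In practice a library such as csvtojson or papaparse handles quoting and edge cases better than a hand-rolled parser.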

Accessing Data from axios on a view Node.js

I'm trying to print the data from an axios request to a Pug template. I'm having issues with accessing the data. I'm new to this and I think it has something to do with how my data object is being passed in to the route.
The Backend
axios.get("https://www.worldometers.info/coronavirus/")
.then(res => {
const data = [];
const $ = cheerio.load(res.data);
$('.maincounter-number').each((index, element) => {
const numberData = $(element).text();
data[0] = {numberData: numberData};
//console.log(data);
});
}).catch(err => {
console.log("Error fetching and parsing data: ", err);
});
app.get("/", (req, res) => {
res.render('index', {title: 'Home', data: data});
});
The front end
p #{data.numberData}
There is a problem with scoping: the data variable is declared inside the .then callback, so it is not visible to the route handler. Wrap the scraping in a function that returns the data and await it inside the route:
async function scrapeWorldOMeter(){
    try {
        const worldOMeterResponse = await axios.get("https://www.worldometers.info/coronavirus/");
        const data = [];
        const $ = cheerio.load(worldOMeterResponse.data);
        $('.maincounter-number').each((index, element) => {
            const numberData = $(element).text();
            data[0] = {numberData: numberData};
        });
        // Return after the loop so the function actually resolves with the data
        return data[0];
    }
    catch(err){
        throw new Error(`Can't scrape WorldOMeter ${err}`)
    }
}
app.get("/", async (req, res) => {
    const data = await scrapeWorldOMeter()
    res.render('index', {title: 'Home', data});
});

How can I access nested promise data?

I am trying to set up a route that sends data from a nested promise to my Vue app.
But I'm having trouble getting data out of the nested promises.
I tried using a callback, with no success:
app.get('/notification', (req, res) => {
    const getData = (data) => {
        console.log(data)
    }
    scheduler(data)
})
const scheduler = (callback) => {
    sftp
        .connect({ credentials })
        .then(() => {
            return sftp.list(root);
        })
        .then(async data => {
            const filteredFile = data.filter(file => {
                let currentDate = moment();
                let CurrentTimeMinusFive = moment().subtract(5, "minutes");
                let allAccessTimes = file.accessTime;
                let parsedAccessTimes = moment(allAccessTimes);
                let filteredTime = moment(parsedAccessTimes).isBetween(
                    CurrentTimeMinusFive,
                    currentDate
                );
                return filteredTime;
            });
            for (const file of filteredFile) {
                let name = file.name;
                let filteredThing;
                await sftp
                    .get(`Inbound/${name}`)
                    .then(data => {
                        csv()
                            .fromString(data.toString())
                            .subscribe(function (jsonObj) {
                                return new Promise(function (resolve, reject) {
                                    filteredThing = new Notification(jsonObj);
                                    filteredThing.save()
                                        .then(result => {
                                            console.log(result);
                                            callback(result) // THIS IS THE RESULT I NEED IN MY FRONT END
                                        })
                                        .catch(err => {
                                            console.log(err);
                                        });
                                    resolve();
                                });
                            });
                    });
            }
        })
When I go to localhost/notification I get:
ReferenceError: data is not defined
Thanks in advance!
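The ReferenceError comes from the route itself: scheduler(data) references a variable named data that was never declared (the callback defined above it is getData, and it is never passed anywhere). Since scheduler takes the callback as its argument, a minimal sketch of the route under that assumption:
app.get('/notification', (req, res) => {
    // Pass the callback itself; scheduler invokes it with each saved notification
    scheduler((result) => {
        res.json(result);
    });
});
Note that scheduler may call the callback once per saved row, and an Express response can only be sent once, so in practice you would collect the results (or have scheduler return a promise) and respond a single time.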

Display all collections in mongodb+express

This is my code for sending data to a database:
app.post('/thanks', function(req, res) {
    if (atendees.checkin === req.body.dbstring) {
        dbConn.then(client => {
            delete req.body._id;
            const db = client.db('mydata')
            db.collection(atendees.checkin).insertOne(req.body);
        })
(...)
This is how I display on the page after clicking on a href link:
app.get('/view-feedbacks', function(req, res) {
    dbConn.then(client => {
        const db = client.db('mydata')
        db.collection(atendees.checkin).find({}).toArray().then(function(feedbacks) {
            res.status(200).json(feedbacks);
            atendees.checkin = ' '
        }).catch(err => {
            throw(err);
        })
    });
});
That works fine. How can I do something similar to display all collections from the database instead of just the individual ones?
This is what I tried to do:
app.get('/view-history', function(req, res) {
    dbConn.then(client => {
        const db = client.db('mydata')
        db.listCollections().toArray().then(function(collInfos) {
            res.status(200).json(collInfos);
            atendees.checkin = ' '
        }).catch(err => {
            throw(err);
        })
    });
});
But it just gives me the name of each collection. I want to show all collections and all of their elements.
Edit: my question is different from this one: MongoDB Show all contents from all collections. I'm trying to do this in Express.js, not in the terminal.
Edit2: Using db.collection:
app.get('/view-history', function(req, res) {
    dbConn.then(client => {
        const db = client.db('mydata')
        db.collections().then(function(feedbacks) {
            res.status(200).json(feedbacks);
            atendees.checkin = ' '
        }).catch(err => {
            throw(err);
        })
    });
But this gives the error: TypeError: converting circular structure to JSON
With async/await, this could be done:
app.get('/view-history', async (req, res) => {
    try {
        const client = await dbConn;
        const db = client.db('mydata');
        let collections = await db.collections();
        let documents = await Promise.all(collections.map(async (collection) => {
            let documents = await collection.find({}).toArray();
            return Promise.resolve([collection.collectionName, documents]); // Retain collectionName
        }));
        // Format into an object that looks like `collectionName: documents`
        let formatted = documents.reduce((obj, collection) => {
            obj[collection[0]] = collection[1];
            return obj;
        }, {});
        res.json(formatted);
    } catch (e) {
        console.error(e);
        res.sendStatus(500);
    }
});
A Promise-only approach:
app.get('/view-history', (req, res) => {
    dbConn.then((client) => {
        const db = client.db('mydata');
        return db.collections();
    }).then((collections) => {
        return Promise.all(collections.map((collection) => {
            return new Promise((resolve, reject) => {
                collection.find({}).toArray().then((documents) => {
                    resolve([collection.collectionName, documents]);
                }).catch(reject);
            });
        }));
    }).then((documents) => {
        let formatted = documents.reduce((obj, collection) => {
            obj[collection[0]] = collection[1];
            return obj;
        }, {});
        res.json(formatted);
    }).catch((e) => {
        console.error(e);
        res.sendStatus(500);
    });
});
The main reason this code is unnecessarily verbose is that instead of just returning a big array filled with arrays of documents, you probably want an object that retains the name of the collection, like so:
{
    collection1: [...documents...],
    collection2: [...documents...],
    ...
}
Instead of:
[
    [...documents...],
    [...documents...],
    ...
]
If you do want just a big array of each collection without caring about the names of the collections, it becomes much simpler:
async/await version:
app.get('/view-history', async (req, res) => {
    try {
        const client = await dbConn;
        const db = client.db('mydata');
        let collections = await db.collections();
        let documents = await Promise.all(collections.map((collection) => collection.find({}).toArray()));
        res.json(documents);
    } catch (e) {
        console.error(e);
        res.sendStatus(500);
    }
});
Promise-only version:
app.get('/view-history', (req, res) => {
    dbConn.then((client) => {
        const db = client.db('mydata');
        return db.collections();
    }).then((collections) => {
        return Promise.all(collections.map((collection) => collection.find({}).toArray()));
    }).then((documents) => {
        res.json(documents);
    }).catch((e) => {
        console.error(e);
        res.sendStatus(500);
    });
});
Have you tried just db.collections()? If that also doesn't give what you need, you might have to invoke db.collection(<name>) on each of the names you get from listCollections.
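A sketch of that listCollections-based variant, in the same shape as the async/await handler above (route path and database name taken from the question):
app.get('/view-history', async (req, res) => {
    try {
        const client = await dbConn;
        const db = client.db('mydata');
        // listCollections() only yields collection metadata, so fetch the documents per name
        const collInfos = await db.listCollections().toArray();
        const formatted = {};
        for (const { name } of collInfos) {
            formatted[name] = await db.collection(name).find({}).toArray();
        }
        res.json(formatted);
    } catch (e) {
        console.error(e);
        res.sendStatus(500);
    }
});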

Mongoose remove and create in get route

I have a small issue with Mongoose. What I am doing is getting data from online RSS feeds, parsing it, and pushing it into an array, from which I feed a Mongoose model, and all of this happens in the GET route. What I want to accomplish is to delete all the data from the Mongoose model first and then populate it with the new data, but it either deletes the data altogether (since the parser iterates a few times) or it doesn't delete anything and the data just keeps being added to the model.
Here's my code
'use strict';
const Promise = require('bluebird');
const request = require('request');
const FeedParser = require('feedparser');
const express = require('express');
const router = express.Router();
const xray = require('x-ray')();
var Post = require('../models/post');
var dataArray = [];
router.get('/', function (req, res) {
    const fetch = (url) => {
        return new Promise((resolve, reject) => {
            if (!url) {
                return reject(new Error(`Bad URL (url: ${url}`));
            }
            const feedparser = new FeedParser();
            const items = [];
            feedparser.on('error', (e) => {
                return reject(e);
            }).on('readable', () => {
                // This is where the action is!
                var item;
                console.time('loading')
                while (item = feedparser.read()) {
                    items.push(item);
                }
            }).on('end', () => {
                resolve({
                    meta: feedparser.meta,
                    records: items
                });
            });
            request({
                method: 'GET',
                url: url
            }, (e, res, body) => {
                if (e) {
                    return reject(e);
                } else if (res.statusCode != 200) {
                    return reject(new Error(`Bad status code (status: ${res.statusCode}, url: ${url})`));
                }
                feedparser.end(body);
                feedparser.on('end', function () {
                    console.log('Done');
                });
            });
        });
    };
    Promise.map([
        'url',
        'url',
        'url',
        'url'], (url) => fetch(url), { concurrency: 4 }) // note that concurrency limit
        .then((feeds) => {
            feeds.forEach(feed => {
                feed.records.forEach(record => {
                    dataArray.push(record);
                });
            });
        }).catch(function (error) {
            console.log(error);
        });
    Post.remove({}, function (err) {
        if (err) {
            console.log(err);
        } else {
            console.log('collection removed');
        }
    });
    dataArray.forEach(post => {
        Post.create({
            title: post.title,
            content: post.description,
            created: post.date,
            image: post['rss:image']['#'],
            link: post.link
        }, function (err, newPost) {
            console.log(newPost.title);
        });
    });
    Post.find({}, function (err, posts) {
        if (err) {
            console.log(err);
        } else {
            res.render('index/home', {
                posts: posts
            });
        }
    });
});
module.exports = router;
None of this is going to run synchronously. You can do something like this:
'use strict';
const Promise = require('bluebird');
const request = require('request');
const FeedParser = require('feedparser');
const express = require('express');
const router = express.Router();
const xray = require('x-ray')();
var Post = require('../models/post');
router.get('/', function (req, res) {
    let dataArray = []; // reset on every request so posts are not duplicated across requests
    const fetch = (url) => {
        return new Promise((resolve, reject) => {
            if (!url) {
                return reject(new Error(`Bad URL (url: ${url}`));
            }
            const feedparser = new FeedParser();
            const items = [];
            feedparser.on('error', (e) => {
                return reject(e);
            }).on('readable', () => {
                // This is where the action is!
                var item;
                console.time('loading');
                while (item = feedparser.read()) {
                    items.push(item);
                }
            }).on('end', () => {
                resolve({
                    meta: feedparser.meta,
                    records: items
                });
            });
            request({
                method: 'GET',
                url: url
            }, (e, res, body) => {
                if (e) {
                    return reject(e);
                } else if (res.statusCode != 200) {
                    return reject(new Error(`Bad status code (status: ${res.statusCode}, url: ${url})`));
                }
                feedparser.end(body);
                feedparser.on('end', function () {
                    console.log('Done');
                });
            });
        });
    };
    Post.remove({}, function (err) {
        if (err) {
            console.log(err);
            return;
        }
        console.log('collection removed. Starting to fetch Posts from Service');
        // Only start fetching once the collection has actually been emptied
        Promise.map([
            'url',
            'url',
            'url',
            'url'], (url) => fetch(url), { concurrency: 4 }) // note the concurrency limit
            .then((feeds) => {
                feeds.forEach(feed => {
                    dataArray = dataArray.concat(feed.records);
                });
                console.log('inserting posts in the collection');
                dataArray.forEach(post => {
                    Post.create({
                        title: post.title,
                        content: post.description,
                        created: post.date,
                        image: post['rss:image']['#'],
                        link: post.link
                    }, function (err, newPost) {
                        console.log(newPost.title);
                    });
                });
                console.log('Fetching posts from the collection');
                Post.find({}, function (err, posts) {
                    if (err) {
                        console.log(err);
                    } else {
                        res.render('index/home', {
                            posts: posts
                        });
                    }
                });
            }).catch(function (error) {
                console.log(error);
            });
    });
});
module.exports = router;
I haven't tested this. Please test it on your end. Let me know if there's an error or something.
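One remaining caveat: Post.create is asynchronous, so Post.find can still run before all of the inserts have finished and render an incomplete page. If that turns out to matter, a sketch of waiting for the inserts first (Mongoose's create returns a promise when no callback is passed):
Promise.all(dataArray.map((post) => Post.create({
    title: post.title,
    content: post.description,
    created: post.date,
    image: post['rss:image']['#'],
    link: post.link
})))
    .then(() => Post.find({}))
    .then((posts) => res.render('index/home', { posts: posts }))
    .catch((error) => console.log(error));
This would replace the dataArray.forEach and Post.find blocks inside the final .then above.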
