I'm trying to call the getMarchandiseList() method, which calls the mongoose method find({}), in my marchandise router.
The problem is that when I send the response I get result = 0 before find({}) has returned its result.
//the result should be
{id_marchandise : 01 , libelle_marchandise : 'fer'}.
marchandiserouter.js
var express = require("express");
var router = express.Router();
var marchandiseDAO = require('../DAO/marchandises');

router.get('/listmarchandise', function (req, res) {
    var marchandise = new marchandiseDAO();
    marchandise.getMarchandiseList();
    res.send("result" + marchandise.result);
});

module.exports = router;
MarchandiseDAO.js
var model = require("../Models/model");

var marchandiseDAO = function () {
    console.log("get instance ");
    this.result = 0;
}

marchandiseDAO.prototype.getMarchandiseList = function () {
    console.log("begin");
    model.MarchandiseModel.find({}, function (err, marchandiselist) {
        console.log("traitement");
        if (err) result = err;
        else result = marchandiselist;
    });
    console.log("end");
}
You cannot run mongoose methods synchronously. But if you use a modern version of Node then you can make the code read as if it ran synchronously.
Unfortunately you originally posted your code examples as screenshots, which I can't edit directly, so instead I will show you a general solution.
Using normal promises:
Model.find({...}).then(data => {
// you can only use data here
}).catch(err => {
// always handle errors
});
Using await:
try {
let data = await Model.find({...});
// you can only use data here
} catch (err) {
// always handle errors
}
The second example can only be used in an async function. See these for more info:
try/catch blocks with async/await
Use await outside async
Using async/await in Node 6 with Babel
When do async methods throw and how do you catch them?
using promises in node.js to create and compare two arrays
Keeping Promise Chains Readable
function will return null from javascript post/get
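Applied to the code in the question, a minimal sketch of the route using the await form might look like this (assuming, as in the question, that the mongoose model is exported as model.MarchandiseModel; here the route queries the model directly instead of reading the DAO's this.result field):

var express = require("express");
var router = express.Router();
var model = require("../Models/model");

router.get('/listmarchandise', async function (req, res) {
    try {
        // await suspends this handler until the query has resolved
        var marchandiselist = await model.MarchandiseModel.find({});
        res.send(marchandiselist);
    } catch (err) {
        // always handle errors
        res.status(500).send(err.message);
    }
});

module.exports = router;

The same idea works if you keep the DAO, as long as getMarchandiseList() returns the promise from find({}) so the route can await it.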
I have created a separate js file in Node.js and I am trying to connect to PostgreSQL using the code below, but it never works: the promise is always pending. The code is not waiting at this line (await client.connect()). I could not understand what the issue is. Can anyone please help me with this?
const pg = require('pg');

async function PostgresConnection(query, config) {
    const client = new pg.Client(config);
    await client.connect(); // not waiting for the connection to succeed and returning to caller and exit
    const pgData = await client.query(query);
    await client.end();
    return pgData;
}

async function uploadDataToPostgres(config) {
    var query = "select * from firm_data";
    await PostgresConnection(query, config);
}

module.exports = {
    uploadDataToPostgres
}
I am trying to call the above method from another page:
function ProcessData() {
    var result = uploadDataToPostgres(config);
}
When you invoke an async function from outside an async function, you need to treat it as a Promise. Change your invoker function to look like this:
function ProcessData() {
uploadDataToPostgres(config)
.then(function success(result) {
/* do something useful with your result */
console.log(result)
})
.catch(function error(err) {
console.error('uploadDataToPostgres failure', err)
})
}
Your code, var result = uploadDataToPostgres(config), stashes a Promise object in result. The async function starts running when you call it, but you never observe its outcome until you attach .then() and .catch() handlers to the promise (or await it).
It's a bit confusing until you completely grasp the relationship between async functions and Promises: an async function is just a function that returns a Promise, with nicer syntax.
It's possible your uploadDataToPostgres() function throws an error (failure to connect to the db?) and the .catch() will catch that.
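As an alternative, if the caller can itself be declared async, you can await the promise directly. One more thing to watch (not shown in the original code): for result to actually contain data, uploadDataToPostgres also has to return the value it awaits, e.g. return await PostgresConnection(query, config);. With that assumption, a minimal sketch:

async function ProcessData() {
    try {
        // assumes uploadDataToPostgres returns the query result
        const result = await uploadDataToPostgres(config);
        console.log(result.rows); // node-postgres results expose the data on the rows array
    } catch (err) {
        console.error('uploadDataToPostgres failure', err);
    }
}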
I'm building a Slackbot that makes a call to an Express app, which then needs to 1) fetch some other data from the Slack API, and 2) insert the resulting data into my database. I think I finally have the flow right using async/await, but the operation is timing out because the original call from the Slackbot needs to receive a response within a fixed time I can't control. It would be fine for my purposes to ping the bot with a response immediately and then execute the rest of the logic asynchronously, but I'm wondering about the best way to set this up.
My Express route looks like:
const express = require('express');
const router = express.Router();
const knex = require('../../db/knex.js');
const slack = require('../../services/slack_helpers');
// POST api/slack/foo
router.post('/foo', async (req, res) => {
    let [body, images] = await slack.grab_context(req);

    knex('texts')
        .insert({ body: body, image_ids: images })
        .then(text => { res.send('worked!'); }) // This sends a response back to the original Slackbot call
        .catch(err => { res.send(err); });
});
module.exports = router;
And then the slack_helpers module looks like:
const { WebClient } = require('@slack/web-api');
const Slack = new WebClient(process.env.SLACKBOT_TOKEN);
async function grab_context(req) {
try {
const context = await Slack.conversations.history({ // This is the part that takes too long
channel: req.body.channel_id,
latest: req.headers['X-Slack-Request-Timestamp'],
inclusive: true,
limit: 5
});
} catch (error) {
return [error.toString(), 'error'];
}
return await parse_context(context);
};
function parse_context(context) {
var body = [];
context.messages.forEach(message => {
body.push(message.text);
});
body = body.join(' \n');
return [body, ''];
}
module.exports = {
grab_context
};
I'm still getting my head around asynchronous programming, so I may be missing something obvious. I think something like res.send perhaps needs to come before the grab_context call? But again, I'm not sure of the best flow here.
Update
I've also tried this pattern in the API route, but still getting a timeout:
slack.grab_context(req).then((body, images) => {
knex ...
})
Your timeout may not be coming from where you think. From what I see, it is coming from grab_context. Consider the following simplified version of grab_context
async function grab_context_simple() {
try {
const context = { hello: 'world' }
} catch (error) {
return [error.toString(), 'error']
}
return context
}
grab_context_simple() /* => Promise {
<rejected> ReferenceError: context is not defined
...
} */
You are trying to return context outside of the try block where it was defined, so grab_context will reject with a ReferenceError. It's very likely that this error is being swallowed at the moment, so it would seem like it is timing out.
The fix is to move a single line in grab_context
async function grab_context(req) {
try {
const context = await Slack.conversations.history({
channel: req.body.channel_id,
latest: req.headers['X-Slack-Request-Timestamp'],
inclusive: true,
limit: 5
});
return await parse_context(context); // <- moved this
} catch (error) {
return [error.toString(), 'error'];
}
};
"I'm wondering the best way to set this up."
You could add a higher level try/catch block to handle errors that arise from the /foo route. You could also improve readability by staying consistent between async/await and promise chains. Below is how you could use async/await with knex, as well as the aforementioned try/catch block
const express = require('express');
const router = express.Router();
const knex = require('../../db/knex.js');
const slack = require('../../services/slack_helpers');
const insertInto = table => payload => knex(table).insert(payload)
const onFooRequest = async (req, res) => {
try {
let [body, images] = await slack.grab_context(req);
const text = await insertInto('texts')({
body: body,
image_ids: images,
});
res.send('worked!');
} catch (err) {
res.send(err);
}
}
router.post('/foo', onFooRequest);
module.exports = router;
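If you do want the behaviour described in the question, acknowledging the Slack call immediately and finishing the work afterwards, one option is to send the response first and let the remaining work run in the background. A rough sketch, with the caveat that errors can then only be logged, not reported back in the HTTP response:

const onFooRequest = async (req, res) => {
    // Acknowledge immediately so the original Slack call does not time out
    res.send('working on it...');

    try {
        const [body, images] = await slack.grab_context(req);
        await insertInto('texts')({ body: body, image_ids: images });
        console.log('worked!');
    } catch (err) {
        // The response has already been sent, so just log the failure
        console.error(err);
    }
};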
MY PROBLEM:
I'm building a web-scraper with Cheerio, Node.js, and Google Cloud Functions.
The problem is I need to make multiple requests, then write data from each request to a Firestore database before calling response.send() and thereby terminating the function.
My code requires two loops: the first loop is with urls from my db, with each one making a separate request. The second loop is with Cheerio using .each to scrape multiple rows of table data from the DOM and make a separate write for each row.
WHAT I'VE TRIED:
I've tried pushing each request to an array of promises and then waiting for all the promises to resolve with Promise.all() before calling res.send(), but I'm still a little shaky on promises and not sure that is the right approach. (I have gotten the code to work for smaller datasets that way, but still inconsistently.)
I also tried creating each request as a new promise and using async/await to await each function call from the forEach loop, to allow time for each request and write to fully finish so I could call res.send() afterward, but I found out that forEach doesn't wait for async callbacks.
I tried to get around that with the p-iteration module, but because it's not actually an array forEach but rather a method on the query result (doc.forEach()), I don't think it works like that.
So here's my code.
NOTE:
As mentioned, this is not everything I tried (I removed my promise attempts), but this should show what I am trying to accomplish.
export const getCurrentLogs = functions.https.onRequest((req, response) => {
//First, I make a query from my db to get the urls
// that I want the webscraper to loop through.
const ref = scheduleRef.get()
.then((snapshot) => {
snapshot.docs.forEach((doc) => {
const scheduleGame = doc.data()
const boxScoreUrl = scheduleGame.boxScoreURL
//Inside the forEach I call the request
// as a function with the url passed in
updatePlayerLogs("https://" + boxScoreUrl + "/");
});
})
.catch(err => {
console.log('Error getting schedule', err);
});
function updatePlayerLogs (url){
//Here I'm not sure on how to set these options
// to make sure the request stays open but I have tried
// lots of different things.
const options = {
uri: url,
Connection: 'keep-alive',
transform: function (body) {
return cheerio.load(body);
}
};
request(options)
.then(($) => {
//Below I loop through some table data
// on the dom with cheerio. Every loop
// in here needs to be written to firebase individually.
$('.stats-rows').find('tbody').children('tr').each(function(i, element){
const playerPage = $(element).children('td').eq(0).find('a').attr('href');
const pts = replaceDash($(element).children('td').eq(1).text());
const reb = replaceDash($(element).children('td').eq(2).text());
const ast = replaceDash($(element).children('td').eq(3).text());
const fg = replaceDash($(element).children('td').eq(4).text());
const _3pt = replaceDash($(element).children('td').eq(5).text());
const stl = replaceDash($(element).children('td').eq(9).text());
const blk = replaceDash($(element).children('td').eq(10).text());
const to = replaceDash($(element).children('td').eq(11).text());
const currentLog = {
    'pts': + pts,
    'reb': + reb,
    'ast': + ast,
    'fg': + fg,
    '3pt': + _3pt,
    'stl': + stl,
    'blk': + blk,
    'to': + to
}
//here is the write
playersRef.doc(playerPage).update({
'currentLog': currentLog
})
.catch(error =>
console.error("Error adding document: ", error + " : " + url)
);
});
})
.catch((err) => {
console.log(err);
});
};
//Here I call response.send() to finish the function.
// I have tried doing this lots of different ways but
// whatever I try the response is being sent before all
// docs are written.
response.send("finished writing logs")
});
Everything I have tried either results in a deadline-exceeded error (possibly because of quota limits, which I have looked into, but I don't think I should be exceeding them), or some unexplained error where the code doesn't finish executing but shows me nothing in the logs.
Please help, is there a way to use async/await in this scenario that I am not understanding? Is there a way to use promises to make this elegant?
Many thanks,
Maybe have a look at something like this. It uses Bluebird promises and the request-promise library:
const Promise = require('bluebird');
var rp = require('request-promise');
const urlList = ['http://www.google.com', 'http://example.com']
async function getList() {
await Promise.map(urlList, (url, index, length) => {
return rp(url)
.then((response) => {
console.log(`${'\n\n\n'}${url}:${'\n'}${response}`);
return;
}).catch(async (err) => {
console.log(err);
return;
})
}, {
concurrency: 10
}); //end Promise.map
}
getList();
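To tie this back to the original function: response.send() should only run once every write has settled. A rough sketch of how the handler could be shaped, under the assumption that updatePlayerLogs is changed to return the promise chain it creates (and, inside it, that the Firestore update() promises from the .each loop are collected into an array and combined with Promise.all):

export const getCurrentLogs = functions.https.onRequest(async (req, response) => {
    try {
        const snapshot = await scheduleRef.get();

        // Wait for every game's scrape-and-write to finish before responding
        await Promise.all(snapshot.docs.map((doc) => {
            const boxScoreUrl = doc.data().boxScoreURL;
            return updatePlayerLogs("https://" + boxScoreUrl + "/"); // must return its promise
        }));

        response.send("finished writing logs");
    } catch (err) {
        console.error(err);
        response.status(500).send("error writing logs");
    }
});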
I'm trying to follow a project from the Mozilla Developer Local Library tutorial. It includes learning how to use Node.js, Express, Mongoose and MongoDB. I am swapping out Mongoose and MongoDB for Firestore, and using Firebase Functions to eventually host with Firebase Hosting.
The project uses a model, view, controller structure. I really want to stick to this structure to help break everything down into manageable chunks.
The first problem I'm trying to resolve is how to return an array that I create inside a promise, so it can be exported as a function that is called from booksController.js.
A concise view of the structure I have is:
index.js
const booksRouter = require('./routes/books');
app.use('/books', booksRouter);
routes/books.js
const books_controller = require('../controllers/booksController');
router.get('/', books_controller.index);
controllers/booksController.js
var Books = require('../models/books');
exports.index = function(req, res){
var books = Books.AllBooks();
res.render('books',{books: books});
};
models/books.js
var booksRef = db.collection('books');

var allBooks = booksRef.get()
    .then(snapshot => {
        var books = [];
        snapshot.forEach(doc => {
            books.push(doc.data());
        });
    })
    .catch(err => {
        console.log('Error getting documents', err);
    });
exports.AllBooks = function(req, res){
return books;
}
I've tried wrapping the promise in a function and returning the array to export it but I get undefined when console logging in booksController.js.
Can someone please enlighten me and complete the puzzle of how to return the array so it can be exported.
What you're trying to do can't be done using require, since require is synchronous.
Instead what you can do is export a function that will fetch the books asynchronously, and use that function in your controller.
models/books.js
const booksRef = db.collection('books');

async function getBooks() {
    // You can implement a cache and fetch the books only once.
    const snapshot = await booksRef.get();
    const books = [];
    snapshot.forEach(doc => {
        books.push(doc.data());
    });
    return books;
}
module.exports = getBooks;
controllers/booksController.js
const getBooks = require('./../models/books');
exports.index = async function(req, res){
const books = await getBooks(); // Handle exception
res.render('books',{books: books});
};
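The comment about caching is worth spelling out: require runs once and synchronously, but the data arrives asynchronously, so a simple module-level cache lets every later call reuse the first fetch. A minimal sketch of that idea (an illustration, assuming db is initialised elsewhere as in the question):

// models/books.js
const booksRef = db.collection('books');

let cachedBooks = null;

async function getBooks() {
    if (cachedBooks) {
        return cachedBooks; // reuse the result of the first fetch
    }
    const snapshot = await booksRef.get();
    cachedBooks = snapshot.docs.map(doc => doc.data());
    return cachedBooks;
}

module.exports = getBooks;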
How come this async/await doesn't work?
I've spent all day trying different combinations, watching videos and reading about async/await to find why this doesn't work before posting this here.
I'm trying to make a second Node.js app that will run on a different port; my main app will call it so it scrapes some data and saves it to the db as a cache.
What it's supposed to do:
Take a keyword and send it to a method called scrapSearch; this method creates the complete URI and passes it to the method that actually fetches the web page and returns it up to the first caller.
What is happening:
The console.log below the initial call is triggered before the results are returned.
Console output
Requesting : https://www.google.ca/?q=mykeyword
TypeError: Cannot read property 'substr' of undefined
at /DarkHawk/srv/NodesProjects/_scraper/node_scrapper.js:34:18
at <anonymous>
app.js:
'use strict';
var koa = require('koa');
var fs = require('fs');
var app = new koa();
var Router = require('koa-router');
var router = new Router();
app
.use(router.routes())
.use(router.allowedMethods());
app.listen(3002, 'localhost');
router.get('/scraptest', async function(ctx, next) {
    var sfn = require('./scrap-functions.js');
    var scrapFunctions = new sfn();
    var html = await scrapFunctions.scrapSearch("mykeyword");
    console.log(html.substr(0, 20));

    //Normally here I'll be calling my other method to extract content
    let json_extracted = scrapFunctions.exGg('mykeywords', html);

    //Save to db
});
scrap-functions.js:
'use strict';
var request = require('request');
var cheerio = require('cheerio');
function Scraper() {
    // I tried saving html in here but the main script seems to have issues retrieving that
    this.html = '';
    this.kw = {};
    this.tr = {};
}
// Search G0000000gle
Scraper.prototype.scrapSearch = async function(keyword) {
let url = "https://www.google.ca/?q=" + keyword;
let html = await this.urlRequest(url);
return html;
};
// Get a URL's content
Scraper.prototype.urlRequest = async function(url) {
console.log("Requesting : "+url);
await request(url, await function(error, response, html) {
if(error) console.error(error);
return response;
});
};
module.exports = Scraper;
I tried a lot of things but I finally gave up. I tried putting async/await before each method, and that didn't work either.
Why isn't that working?
Edit: the wrong function name is because I created two different projects for testing and mixed up the files while copy/pasting.
You are not returning anything from urlRequest. Because it is an async function, it will still create a promise, but it will resolve with undefined. Therefore your html is undefined as seen in the error.
The problematic part is the request function which is a callback style function, but you're treating it as a promise. Using await on any value that is not a promise, won't do anything (technically it creates a promise that resolves directly with the value, but the resulting value remains the same). Both awaits within the urlRequest are unnecessary.
request(url, function(error, response, html) {
if(error) console.error(error);
// This return is for the callback function, not the outer function
return response;
});
You cannot return a value from within the callback. As it's asynchronous, your function will already have finished by the time the callback is called. With the callback style you would do the work inside the callback.
But you can turn it into a promise. You have to create a new promise and return it from urlRequest. Inside the promise you do the asynchronous work (request) and either resolve with the value (the response) or reject with the error.
Scraper.prototype.urlRequest = function(url) {
console.log("Requesting : "+url);
return new Promise((resolve, reject) => {
request(url, (err, response) => {
if (err) {
return reject(err);
}
resolve(response);
});
});
};
When an error occurs you want to return from the callback, so the rest (the success path) is not executed. I also removed the async keyword, because the function now creates a promise manually.
If you're using Node 8, you can promisify the request function with the built-in util.promisify.
const util = require('util');
const request = require('request');
const requestPromise = util.promisify(request);
Scraper.prototype.urlRequest = function(url) {
console.log("Requesting : " + url);
return requestPromise(url);
};
Both versions will resolve with the response and to get the HTML you need to use response.body.
Scraper.prototype.scrapSearch = async function(keyword) {
let url = "https://www.google.ca/?q=" + keyword;
let response = await this.urlRequest(url);
return response.body;
};
You still need to handle errors from the promise, either with .catch() on the promise, or using try/catch when you await it.
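For example, a minimal sketch of the route from the question with that error handling added:

router.get('/scraptest', async function(ctx, next) {
    var sfn = require('./scrap-functions.js');
    var scrapFunctions = new sfn();
    try {
        var html = await scrapFunctions.scrapSearch("mykeyword");
        console.log(html.substr(0, 20));
    } catch (err) {
        // rejections from urlRequest (or anything thrown in scrapSearch) end up here
        console.error(err);
    }
});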
It is absolutely essential to understand promises when using async/await, because it's syntactic sugar on top of promises, to make it look more like synchronous code.
See also:
Understand promises before you start using async/await
Async functions - making promises friendly
Exploring ES6 - Promises for asynchronous programming