So I'm completely stumped and hope someone can help with the combination of the Node.js Async and Request modules. I'm attempting to build a list of files to download, which I pass to Async as an array of objects containing all the information I need to download and store each file. After tons of debugging I discovered that the requests are not even making their way out, and I can't figure out why.
// Download every missing file in parallel; the final callback fires once
// all per-file callbacks have been invoked (or on the first error).
async.each(missingFiles,
  function (obj, cb) {
    console.log(obj.url);
    //var file = nfs.createWriteStream(obj.fullPath);
    request(obj.url, function (error, response, body) {
      if (error) {
        // Propagate the failure — the original always called cb() with no
        // argument, so the final handler could never see an error.
        return cb(error);
      }
      if (response.statusCode == 200) {
        console.log(response);
      }
      cb();
    });
  },
  function (err) {
    if (err) {
      console.log("Async failed");
    }
  }
);
I have come across similar issues before. If you send the response outside the async block, the HTTP request/response cycle ends before your async tasks complete. The fix is to send the response in the done() callback.
// Run the downloads sequentially, and only answer the HTTP request once
// every task has finished — never outside the done() callback.
app.post("/download", function (req, res) {
  async.eachSeries(missingFiles, function (obj, cb) {
    //do your work here
    cb();
  }, function done(err) {
    // The original ignored err here and never closed the app.post() call.
    if (err) {
      return res.status(500).json({success: false});
    }
    res.json({success: true});
  });
});
Related
Hello, I have a request which fetches some JSON data from a third-party API:
// Fetch the active sent trade offers; `json: true` makes request parse the
// body for us. The original callback declared a 4th parameter literally
// named `undefined` (a SyntaxError in strict mode) and never checked err.
request({
  url: 'https://api.steampowered.com/IEconService/GetTradeOffers/v1/?key=MYAPIKEY&get_sent_offers=1&active_only=1&format=json',
  json: true
}, (err, response, body) => {
  if (err) {
    return console.log(err);
  }
  // NOTE: this assignment happens asynchronously — tradeItems is only
  // usable from inside (or after) this callback.
  tradeItems = JSON.stringify(body.response['trade_offers_sent'][0].items_to_give);
});
How can I send the fetched tradeItems data to the offer.addTheirItems value?
// NOTE(review): this snippet is truncated in the original post — the
// callbacks for offer.send, manager.setCookies and client.on are never
// closed, so the braces below do not balance.
client.on('webSession', function(sessionID, cookies) {
// Cookies must be set before any trade-offer call can succeed.
manager.setCookies(cookies, function(err) {
if (err) {
console.log(err);
process.exit(1);
return; // unreachable: process.exit(1) already terminated the process
}
let offer = manager.createOffer("https://steamcommunity.com/tradeoffer/new/?partner=123456789&token=1234");
// Called with no arguments here — this is where the asynchronously
// fetched tradeItems value is meant to be passed in.
offer.addTheirItems();
offer.setMessage("");
offer.send(function(err, status) {
if (err) {
console.log(err);
return;
}
First, that's a JavaScript async issue.
There are several ways to solve it:
Change the request call into an async function, and move the tradeItems variable outside the request callback.
I recommend request-promise module
Move the code below into the callback of the code above.
This is a simple answer because your sample code is separated into two parts.
I have a simple Node Express app that has a service that makes a call to a Node server. The Node server makes a call to an AWS web service. The AWS service simply lists any S3 buckets it has found, and the call is asynchronous. The problem is I don't seem to be able to get the server code to "wait" for the AWS call to return with the JSON data, and the function returns undefined.
I've read many, many articles on the web about this including promises, wait-for's etc. but I think I'm not understanding the way these work fully!
This is my first exposure to Node and I would be grateful if somebody could point me in the right direction.
Here's some snippets of my code...apologies if it's a bit rough but I've chopped and changed things many times over!
Node Express;
var Httpreq = new XMLHttpRequest(); // a new request
// NOTE(review): the third argument `false` makes this a *synchronous*
// (blocking) request, which is deprecated; also, XMLHttpRequest is a
// browser API and is not available in Node core.
Httpreq.open("GET","http://localhost:3000/listbuckets",false);
Httpreq.send(null);
console.log(Httpreq.responseText);
// Only valid inside a function — this is a fragment of a larger one.
return Httpreq.responseText;
Node Server
app.get('/listbuckets', function (req, res) {
// MyFunction returns before its asynchronous S3 callbacks fire, so
// bucketData is always undefined here — this is the bug being asked about.
var bucketData = MyFunction(res,req);
console.log("bucketData: " + bucketData);
});
/**
 * Lists S3 buckets and writes the result to the Express response.
 * s3.listBuckets() is asynchronous, so the HTTP response must be sent
 * from inside the 'success' handler — the original returned a local
 * variable before the callbacks fired (always undefined), called
 * res.send() before the data arrived, and registered two separate
 * 'success' handlers on the same request object.
 */
function MyFunction(res, req) {
  var request = s3.listBuckets();
  request.
  on('success', function(response) {
    // log the successful data response
    console.log(response.data);
    // Reply only once the data has actually arrived.
    res.send(response.data);
  }).
  on('error', function(response) {
    console.log("Error!");
    res.status(500).send('Failed to list buckets');
  }).
  on('complete', function() {
    console.log("Always!");
  }).
  send();
}
Use the latest Fetch API (https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) to make HTTP calls. It has built-in support with Promise.
// Same fetch call expressed with async/await instead of a .then chain.
(async () => {
  try {
    const response = await fetch('http://localhost:3000/listbuckets');
    // do something with the response here
  } catch (error) {
    // Error :(
  }
})();
I eventually got this working with;
const request = require('request');

// Fetch `url`, parse the XML body, and echo the raw body back to the
// client. The response is written from *inside* the callback — that is
// the whole point of the fix.
request(url, function (error, response, body) {
  if (!error && response.statusCode == 200) {
    parseString(body, function (err, result) {
      console.log(JSON.stringify(result));
    });
    // from within the callback, write data to response, essentially returning it.
    res.send(body);
  }
  else {
    // Surface the failure instead of swallowing it silently — the original
    // left this branch empty with the log commented out, so the client
    // would simply hang with no response.
    console.log(error);
    res.status(response ? response.statusCode : 500).send('Upstream request failed');
  }
})
I have been using the async module to manage my code flow, and it's indeed a very powerful lib. However, as the code grows bigger and the logical flow becomes more complex, I run into a problem with too many async callbacks to manage. Some pseudo-code to demonstrate the issue is below:
async.auto({
  getjobs: function (cb) {},
  // Dependent task: in async.auto the task function must be the *last*
  // element of the dependency array — `loopjobs:['getjobs']{...}` is not
  // valid JavaScript.
  loopjobs: ['getjobs', function (cb) {
    // Pass dojob itself; invoking dojob(job, cb_map) here would run it
    // immediately and hand async.map its return value, not an iterator.
    async.map(jobs, dojob, cb);
  }],
}, function (err, result) {
  //handle
});
// Runs one job as three dependent steps: step2 and step3 both wait on
// step1, and step3 fans out again through moreloops.
function dojob(job, callback) {
async.auto({
step1:function(cb){},
step2:['step1', function(cb) {}],
step3:['step1', function(cb) {
// NOTE(review): `params` is a free variable here — presumably it should
// be derived from `job`; confirm against the real code.
moreloops(params, cb);
}]
}, callback)
}
// Fan out over params with the per-item worker. The iterator must be
// passed by reference — the original wrote doloop(param, cb_map), which
// invokes it immediately, and the worker defined below is actually
// named dbloop, so `doloop` did not even exist.
function moreloops(params, cb) {
  async.map(params, dbloop, cb);
}
/**
 * Worker applied to one item of the moreloops fan-out.
 * Performs the (elided) per-item work, then signals completion.
 * @param {*} param - the item being processed
 * @param {Function} cb - async-style completion callback
 */
function dbloop(param, cb) {
  //dosomething
  cb();
}
In this flow, a combination of loops and sequential, parallel and conditional async callbacks is being used due to the needs of the business logic. It becomes quite difficult to troubleshoot if any function is not returning a callback for any reason. I tried to debug using a logging framework, but obviously it's not very efficient due to the asynchronous nature of Node.js.
Is there any good way to
Improve the flow in coding
Identify in which function the flow stops.
Thanks!
A few ways to improve debugging:
Use the vasync module — it's like async but with better debugging.
vasync may not have all the tools you need, so promisify all callbacks with bluebird and use promises, so that functions will return at least .then and .catch.
For example, in request module:
var request = require('request');

// Issue a GET for `url` (a free variable from the enclosing scope) and
// forward only the error and body to the node-style callback.
function makeRequest(cb) {
  request(url, (err, res, body) => cb(err, body));
}
becomes
var Promise = require('bluebird');
// Promisify the main request function so request(url) returns a promise,
// then promisifyAll for its helper methods (get, post, ...).
var request = Promise.promisify(require("request"));
Promise.promisifyAll(request);
// Same node-style interface as before, but internally promise-based, so
// every failure funnels into one .catch instead of ad-hoc err checks.
function makeRequest(cb) {
request(url)
.then(function(response) {
var body = response.body;
cb(null, body);
})
.catch(cb);
}
Just handle all the successes in .then and errors in .catch, and pass errors back to the highest level.
I'm trying to get data from an existing mongo database, and for each document I get, I need to make 2 requests to two different services, github and twitter.
The code works if I put a limit in my cursor, but it stops working when I increase, or remove the limit. I think it is because I'm making too many concurrent requests to either github or twitter or both. I get the message
{ [Error: read ECONNRESET] code: 'ECONNRESET', errno: 'ECONNRESET', syscall: 'read' }
I'm not sure how to fix this.
// NOTE(review): this fires one HTTP request pair per document with no
// concurrency cap — with a large result set the flood of parallel
// sockets is what produces the ECONNRESET described above.
col.find({}).limit(100).forEach(function (doc) {
var options = {};
var params = {};
request(options, function (err, response, body) {
// Second, nested request — neither err is checked or propagated.
request (options, function (err, response, body){
// Stuff
});
});
}, function (err) {
if (err) {
console.log(err);
return;
}
// Runs when the cursor is exhausted, not when the requests finish.
db.close();
})
You are basically starting 100 (or the # of documents that your query results in) HTTP requests in parallel.
The very useful async module has a function eachLimit to limit the number of concurrent (running in parallel) asynchronous tasks. In your case, you could leverage it like so:
var async = require('async');

// Materialize the cursor, then process the documents with a bounded
// number of in-flight request pairs so the remote hosts are not flooded.
col.find({}).limit(100).toArray(function (err, docs) {
  if (err) return console.log(err);
  // Limit the # of concurrent HTTP requests to 2(-ish).
  async.eachLimit(docs, 2, function (doc, done) {
    request(options, function (err, response, body) {
      if (err) return done(err);
      request (options, function (err, response, body){
        if (err) return done(err);
        // Stuff
        return done();
      });
    });
  }, function (err) {
    // If we get here, we're done.
    if (err) {
      console.log(err);
      return;
    }
    db.close();
  });
});
Be aware that .toArray() reads all the query results into memory first (but your .forEach() does that as well I think).
This is probably caused by too many concurrent request calls; you can use the async library to limit the requests.
mapLimit(arr, limit, iterator, [callback])
it's very easy and trivial to use.
I have a problem with a jQuery Ajax request that does not respond when my Node.js server crashes.
/**
 * POSTs `query` to `url` and reports the outcome through a single
 * node-ish callback: onComplete(errorFlag, payload) — the payload is the
 * response body on success or the HTTP status code on failure.
 */
function postRequest(url, query, onComplete) {
  const jqxhr = $.post(url, query);
  jqxhr.done((response) => onComplete(false, response));
  jqxhr.fail((xhr) => onComplete(true, xhr.status));
}
// Load '/noderoute' into #content on success, or alert the HTTP status
// on failure.
postRequest('/noderoute', {id: 4}, function (error, res) {
  if (error) {
    alert(res);
  } else {
    $('#content').html(res);
  }
});
If the server responds, all is good and well, but if it crashes nothing happens. How can I solve this? Thank you.
Edit: Solved
I have been running my tests on a production build instead of my development files. Everything working fine now.
You are passing an incorrect xhr argument to your fail call.
.fail(xhr, function () {
should be
.fail(function (xhr) {