Stop multiple async calls from client Node JS Angular - node.js

I have a single page app that gets a list of objects from an node js server. However if the user selects the first item in the list before the rest of the related objects are loaded it needs to cancel the request. I can cancel the request on the client side (angular) but don't see how to cancel a request on an already started call in node !
The code below is the method i use to call the node api with the ability to cancel that request on the client side.
getDiscovery : function(ids) {
var Ids = [],
apiUrl = '/api/Something/ids',
self = this,
deferredAbort = $q.defer();
// Initiate the AJAX request.
var request = $http({
method: "get",
url: apiUrl,
params: {
ids: JSON.stringify(ids)
},
timeout: deferredAbort.promise
});
var promise = request.then(
function(response) {
return(response.data);
},
function(response) {
return($q.reject( "Something went wrong" ));
}
);
promise.abort = function() {
deferredAbort.resolve();
};
promise.finally(
function() {
console.info( "Cleaning up object references." );
promise.abort = angular.noop;
deferredAbort = request = promise = null;
}
);
return(promise);
}
This is the method on the node api which needs to be cancelled
exports.getItemsDiscovery = function(req, res){
var Items = JSON.parse(req.query.ids);
async.each(Items,
function(Item, callback){
method.getSomething(Item.Id, function(data){
console.log('Got data for', item.Id);
callback();
});
},
function(err) {
return res.send(Items);
});
};
The route is
app.get('/api/ids', something.getItemsDiscovery);
Any help would be much appreciated
Rob

Related

Ajax call to API with parameter

I have an api route per below:
apiRouter.get('/api/getCompTeams', function(req, res) {
var compTeams = Team.find({}, {competition: 1, team:1, _id:0} ).then(eachOne => {
res.json(eachOne);
},
(fail)=> {
console.log('Error!');
},
(proceed) => {
return compTeams;
});
});
This returns all teams in the Teams collection, with the below ajax call:
async function ajaxData(url) {
var dataResults ;
try {
// AJAX CALL FOR DATA
dataResults = await $.ajax({
method: 'GET',
url: url,
dataType: 'json',
}); //AJAX CALL ENDS
return dataResults;
} // try ENDS
catch (err) {
console.log("error # ajaxData");
console.log(err.message);
} // catch ENDS
} // ajaxData ENDS
What I would like to do is to pass a parameter, with the ajax call, eg. 'La Liga', so that the apiRoute returns only teams with key:value that matches 'competition:La Liga'. I have tried various options but none have succeeded.
Any help greatly appreciated.
Using NodeJS & MongoDB.
Mosiki.

Express API timeout despite success callback

I have an express API:
var bodyParser = require("body-parser");
app.use(bodyParser.json());
app.post("/adapter/mail", function(request, response) {
var body = request.body;
var id = body.id;
var params = {id: id};
Parse.Cloud.run("email", params, {
success: function(e) {
console.log("api: success");
respone.status(200).send("e");
},
error: function(e) {
console.log("api: error: " + JSON.stringify(e));
response.status(500).send(e);
}
});
});
Calling the API calls the Parse Cloud Code:
Parse.Cloud.define("email", function(request, response) {
console.log(JSON.stringify(request, null, 4));
response.success("ok");
});
In the console I see that console.log("api: success"); gets executed correctly, but the API request does not end, it times out despite the successful callback.
When Cloud Code returns response.error("error"); instead of response.success("ok"); the request does not timeout but ends immediately.
Why does the request time out on success?
Looks like you are missing an "s" on your callback
respon"s"e.status(200).send("e");

Sending multiple HTTP requests in Node.js: Not receiving any responses or timeouts

I am trying to process Wikipedia articles, and want to receive a list of all Wikipedia articles. In order to do this I am frequently sending http requests to the Wikipedia API, which allows you to receive 500 titles at time and also returns an apcontinue string, which, when used in the following request, returns title starting from that string.
In order to do this, I am using the agentkeepalive module:
var http = require('http');
var Agent = require('agentkeepalive');
var keepaliveAgent = new Agent({
keepAlive: true,
maxSockets: 5,
timeout: 5000,
keepAliveTimeout: 3000
});
To send an http request to Wikipedia, I use the following code:
function wikipediaApiCall(params, callback) {
var options = {
host: 'en.wikipedia.org',
path: '/w/api.php?' + createParamString(params),
method: 'GET',
agent: keepaliveAgent
};
var callbackFunc = function(response) {
var err;
var str = '';
if (('' + response.statusCode).match(/^5\d\d$/)) {
err = new Error('Server error');
}
//another chunk of data has been recieved, so append it to `str`
response.on('data', function (chunk) {
str += chunk;
});
response.on('error', function (e) {
err = new Error('Request error');
});
response.on('timeout', function () {
err = new Error('Timeout');
response.abort();
callback(err);
});
response.on('end', function () {
var obj = JSON.parse(str);
if (obj.warnings) {
err = new Error('Request error');
}
callback(err, obj);
});
}
var req = http.request(options, callbackFunc);
req.setTimeout(5000);
req.on('error', function(err) {
callback(err, null);
return;
});
req.on('timeout', function () {
err = new Error('Timeout');
response.abort();
callback(err);
});
req.on('finish', function(){
console.log('ended');
});
req.end();
}
However, after sending between 16 and 20 request, I am not getting any response, but my request also does not time out.
Any ideas why this is happening?
Update
The request I send to Wikipedia contains the following parameters:
var params = {
list: 'allpages',
aplimit: limit,
apfrom: from,
continue: cont,
// apfilterredir: 'nonredirects'
};
Interestingly, after leaving out the nonredirects setting, I was able to send and receive up to 330 requests, but no more than that.
Update 2
I was able to register a finished event. It appears to be fired for the request that is failing as well. I modified the code accordingly.
Perhaps you need a bot flag to have higher API limits. Maybe there are too many requests in parallel; WMF recommendation is to make requests serially in case of such big tasks. Also, you should use the maxlag parameter with low values, per WMF API Etiquette.

Node.js request web page

I need to connect to a web page and return the status code of the page, which I've been able to achieve using http.request however the pages I need to request can take a long time, sometimes several minutes, so I'm always getting a socket hang up error.
I'm using the following code so far:
var reqPage = function(urlString, cb) {
// Resolve the URL
var path = url.parse(urlString);
var req = http.request({
host: path.hostname,
path: path.pathname,
port: 80,
method: 'GET'
});
req.on('end', function() {
cb.call(this, res);
});
req.on('error', function(e) {
winston.error(e.message);
});
};
What do I need to do to ensure that my application still attempts to connect to the page even if it's going to take a few minutes?
Use the request module and set the timeout option to an appropriate value (in milliseconds)
var request = require('request')
var url = 'http://www.google.com' // input your url here
// use a timeout value of 10 seconds
var timeoutInMilliseconds = 10*1000
var opts = {
url: url,
timeout: timeoutInMilliseconds
}
request(opts, function (err, res, body) {
if (err) {
console.dir(err)
return
}
var statusCode = res.statusCode
console.log('status code: ' + statusCode)
})
Add this if you don't want to use a higher level http client like request or superagent , then add this...
req.on("connection", function(socket){
socket.setTimeout((1000*60*5)); //5 mins
});

Node.js+Mongodb, two ajax request at the same time, one is suspended

Here is my environment:
Mac OS X: 10.7.5
Node.JS: v0.8.14
Mongodb: v2.0.6 + node-mongodb-native driver
Redis: 2.6.3
language: coffee-script
Here is the thing:
when the index page loads, two ajax requests will be sent:
var xhr1 = $.ajax({
url: '/a',
'dataType': 'json'
'success': function(){}
);
xhr1.done(function(){...})
var xhr2 = $.ajax({
url: '/b',
'dataType': 'json'
'success': function(){}
);
xhr2.done(function(){...})
both of then will query Mongodb, like this:
app.get('/a', functionA);
app.get('/b', functionB);
functionA = function() {
var collectionA = new mongodb.Collection(client, 'a');
collectionA.find().toArray(function(err, arrayA){
var collectionB = new mongodb.Collection(client, 'b');
collectionB.find().toArray(function(err, ArrayB) {
});
});
}
functionA = function() {
var collectionB = new mongodb.Collection(client, 'b');
collectionB.find().toArray(function(err, arrayB){
});
}
collectionB in two requests are the same collection
each time, one request(1st or 2nd) was suspended, no response, no error. from the logs, I can see that request was stuck at the find() method. what's wired is after about two minutes, the page sent the same request again(I guess JQuery did that), then got the response.
in these two request, no redis request.
is it because mongodb? concurrent problem?

Resources