Catching ECONNREFUSED in node.js with http.request? - node.js

I'm trying to catch ECONNREFUSED errors when using a HTTP client in node.js. I'm making requests like this:
var http = require('http');
var options = { host: 'localhost', port: '3301', path: '/', method: 'GET' };
http.request(options).on('response', function (res) {
  // do some stuff
});
I can't figure out how to catch this error:
Error: connect ECONNREFUSED
at errnoException (net.js:614:11)
at Object.afterConnect [as oncomplete] (net.js:605:18)
If I do request.on('error', function () {});, it doesn't catch it. If I do it like this:
var req = request.on(etc)
req.on('error', function blah () {});
Then I get TypeError: Object false has no method 'on'.
Do I really have to do a top-level uncaught error thing to deal with this? At the moment whatever I do my whole process quits out.
Edit: I found some blog posts on how to do it by creating a connection object, calling request on that, and then binding to errors on the connection object, but doesn't that make the entire http.request() shortcut useless?

Any reason you're not using http://nodejs.org/docs/v0.6.5/api/http.html#http.request as your base? Try this:
var req = http.request(options, function(res) {
  // Bind 'data', 'end' events here
});

req.on('error', function(error) {
  // Error handling here
});

req.end();

Each call to http.request() returns the request object itself.
So try it like this...
http.request(options, function () {}).on('error', function () {}).end();
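Spelled out a little more fully, the same chained form might look like this (the options and handler bodies are placeholders, not code from the answer above):
var http = require('http');
var options = { host: 'localhost', port: 3301, path: '/', method: 'GET' };
// http.request() returns the ClientRequest, so the handlers can be chained onto it
http.request(options, function (res) {
  // consume the response here ('data', 'end' events)
}).on('error', function (err) {
  // ECONNREFUSED and other connection errors arrive here instead of crashing the process
  console.error(err);
}).end();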

I've got a solution for this, having tried all the suggestions on this page (and many others).
My client needs to detect a turnkey product that runs embedded Windows. The client is served from a different machine than the turnkey.
The turnkey can be in 3 states:
turned off
booted into windows, but not running the turnkey app
running the turnkey app
My client sends a 'find the turnkey product' GET message to my nodejs/express service, which then tries to find the turnkey product via http.request. The behavior for each of the 3 states is:
timeout
ECONNREFUSED - because the embedded Windows phase of the turnkey refuses connections.
normal response to request (happy day scenario)
The code below handles all 3 scenarios. The trick to catching ECONNREFUSED was learning that the handler for it has to be bound to the socket, via the request's 'socket' event.
var http = require('http');
var express = require('express');
var url = require('url');

function find(req, res) {
  var queryObj = url.parse(req.url, true).query;
  var options = {
    host: queryObj.ip, // client attaches ip address of turnkey to url.
    port: 1234,
    path: '/some/path',
  }; // http get options

  var badNews = function (e) {
    console.log(e.name + ' error: ', e.message);
    res.send({'ok': false, 'msg': e.message});
  }; // sends failure messages to log and client

  // instantiate http request object and fire it
  var msg = http.request(options, function (response) {
    var body = '';
    response.on('data', function (d) {
      body += d;
    }); // accumulate response chunks
    response.on('end', function () {
      res.send({'ok': true, 'msg': body});
      console.log('sent ok');
    }); // done receiving, send reply to client
    response.on('error', function (e) {
      badNews(e);
    }); // uh oh, send bad news to client
  });

  msg.on('socket', function (socket) {
    socket.setTimeout(2000, function () { // set short timeout so discovery fails fast
      var e = new Error('Timeout connecting to ' + queryObj.ip);
      e.name = 'Timeout';
      badNews(e);
      msg.abort(); // kill socket
    });
    socket.on('error', function (err) { // this catches ECONNREFUSED events
      badNews(err);
      msg.abort(); // kill socket
    });
  }); // handle connection events and errors

  msg.on('error', function (e) { // happens when we abort
    console.log(e);
  });

  msg.end();
}
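For completeness, here is how the find handler above might be mounted, assuming a standard express app (the route path and port are made up for illustration):
var app = express();

// expose the discovery handler, e.g. GET /find?ip=192.168.0.50
app.get('/find', find);

app.listen(3000, function () {
  console.log('discovery service listening on port 3000');
});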

For those not using DNS (you can also use request instead of get by simply replacing get with request like so: http.request({ ... })):
http.get({
  host: '127.0.0.1',
  port: 443,
  path: '/books?author=spongebob',
  auth: 'user:p#ssword#'
}, resp => {
  let data = '';
  resp.on('data', chunk => {
    data += chunk;
  });
  resp.on('end', () => console.log(data));
}).on('error', err => console.log(err));
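If you switch to http.request, remember that the request has to be ended explicitly; a minimal sketch using the same made-up host, port and path as above:
const http = require('http');

const req = http.request({
  host: '127.0.0.1',
  port: 443,
  path: '/books?author=spongebob',
  method: 'GET'
}, resp => {
  let data = '';
  resp.on('data', chunk => { data += chunk; });
  resp.on('end', () => console.log(data));
});

req.on('error', err => console.log(err)); // ECONNREFUSED and friends end up here
req.end(); // unlike http.get, http.request does not call end() for you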

Related

Timeout handling with node.js stream piping

I'm piping an HTTPS request to a file. It works for 99.9% of calls, but occasionally (maybe when the server or network is unavailable) it hangs indefinitely...
This obviously causes my application to stop working and requires a manual restart...
I have other https connections that used to occasionally hang and now always complete, using the following error-handling code on the request object, as suggested in the node documentation:
request.on('socket', function(socket) {
  socket.setTimeout(10000);
  socket.on('timeout', function() { request.abort(); });
});

request.on('error', function(e) {
  // Handle the error...
  console.error("FAILED!");
});
... but it seems that timeouts on the request are ignored if the response is piped to a file stream. Maybe I should handle a timeout error on the file stream object, but the documentation is not clear whether there is an event I have to wait for other than 'finish'...
Here is the sample code; I hope someone can help me:
var https = require('https'),
    fs = require('fs');

var opts = {
  host: 'www.google.com',
  path: '/',
  method: 'GET',
  port: 443
};

var file = fs.createWriteStream('test.html');

var request = https.request(opts, function(response) {
  response.pipe(file);
  file.on('finish', function() {
    file.close(function(){
      console.log("OK!");
    });
  });
});

request.on('socket', function(socket) {
  socket.setTimeout(10000);
  socket.on('timeout', function() { request.abort(); });
});

request.on('error', function(e) {
  console.error("FAILED!");
});

request.end();
If you want to reproduce the hang, change the host and path to point at a huge file and disconnect the network cable during the transfer; it should time out after 10 seconds, but it doesn't...
I set up a demo node.js http server that sends a very slow response, and a client similar to your sample code.
When I start the client and then stop the server while it is sending the response, I also don't get a timeout event on the socket, but I do get an end event on the response within the client:
var request = https.request(opts, function(response) {
  response.pipe(file);
  file.on('finish', function() {
    file.close(function(){
      console.log("OK!");
    });
  });
  response.on('end', function() {
    // this is printed when I stop the server
    console.log("response ended");
  });
});
Maybe you could listen to that event?
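Putting the socket timeout and the response 'end' handling together, one possible shape for the client is sketched below; the cleanup of the write stream on failure is my own assumption, not something from the node documentation:
var request = https.request(opts, function(response) {
  response.pipe(file);
  file.on('finish', function() {
    file.close(function() {
      console.log("OK!");
    });
  });
  response.on('end', function() {
    // per the observation above, this also fires when the server is stopped mid-response
    console.log("response ended");
  });
});

request.on('socket', function(socket) {
  socket.setTimeout(10000);
  socket.on('timeout', function() { request.abort(); });
});

request.on('error', function(e) {
  console.error("FAILED!", e.message);
  file.close(); // assumption: close the write stream so it isn't left dangling
});

request.end();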

Sending multiple HTTP requests in Node.js: Not receiving any responses or timeouts

I am trying to process Wikipedia articles, and want to receive a list of all Wikipedia articles. In order to do this I am frequently sending http requests to the Wikipedia API, which allows you to receive 500 titles at a time and also returns an apcontinue string which, when used in the following request, returns titles starting from that string.
In order to do this, I am using the agentkeepalive module:
var http = require('http');
var Agent = require('agentkeepalive');

var keepaliveAgent = new Agent({
  keepAlive: true,
  maxSockets: 5,
  timeout: 5000,
  keepAliveTimeout: 3000
});
To send an http request to Wikipedia, I use the following code:
function wikipediaApiCall(params, callback) {
  var options = {
    host: 'en.wikipedia.org',
    path: '/w/api.php?' + createParamString(params),
    method: 'GET',
    agent: keepaliveAgent
  };

  var callbackFunc = function(response) {
    var err;
    var str = '';
    if (('' + response.statusCode).match(/^5\d\d$/)) {
      err = new Error('Server error');
    }
    // another chunk of data has been received, so append it to `str`
    response.on('data', function (chunk) {
      str += chunk;
    });
    response.on('error', function (e) {
      err = new Error('Request error');
    });
    response.on('timeout', function () {
      err = new Error('Timeout');
      response.abort();
      callback(err);
    });
    response.on('end', function () {
      var obj = JSON.parse(str);
      if (obj.warnings) {
        err = new Error('Request error');
      }
      callback(err, obj);
    });
  };

  var req = http.request(options, callbackFunc);
  req.setTimeout(5000);
  req.on('error', function(err) {
    callback(err, null);
    return;
  });
  req.on('timeout', function () {
    var err = new Error('Timeout');
    req.abort();
    callback(err);
  });
  req.on('finish', function(){
    console.log('ended');
  });
  req.end();
}
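createParamString isn't shown in the question; a minimal version might just wrap Node's querystring module (this is an assumption about what it does, not code from the question):
var querystring = require('querystring');

// turn {list: 'allpages', aplimit: 500, ...} into 'list=allpages&aplimit=500&...'
function createParamString(params) {
  return querystring.stringify(params);
}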
However, after sending between 16 and 20 requests, I am not getting any response, but my requests also do not time out.
Any ideas why this is happening?
Update
The request I send to Wikipedia contains the following parameters:
var params = {
  list: 'allpages',
  aplimit: limit,
  apfrom: from,
  continue: cont,
  // apfilterredir: 'nonredirects'
};
Interestingly, after leaving out the nonredirects setting, I was able to send and receive up to 330 requests, but no more than that.
Update 2
I was able to register a finish event. It appears to be fired for the failing request as well. I modified the code accordingly.
Perhaps you need a bot flag to get higher API limits. Maybe there are too many requests in parallel; the WMF recommendation is to make requests serially for big tasks like this. Also, you should use the maxlag parameter with a low value, per the WMF API Etiquette.
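For example, maxlag could simply be added to the existing params object; the value of 5 seconds below is the commonly suggested default, adjust it to suit your task:
var params = {
  list: 'allpages',
  aplimit: limit,
  apfrom: from,
  continue: cont,
  maxlag: 5 // ask the API to refuse the request when replication lag exceeds 5 seconds
};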

Node.js request web page

I need to connect to a web page and return its status code, which I've been able to achieve using http.request. However, the pages I need to request can take a long time, sometimes several minutes, so I always get a socket hang up error.
I'm using the following code so far:
var reqPage = function(urlString, cb) {
  // Resolve the URL
  var path = url.parse(urlString);
  var req = http.request({
    host: path.hostname,
    path: path.pathname,
    port: 80,
    method: 'GET'
  }, function(res) {
    cb.call(this, res); // hand the response back to the caller
  });
  req.on('error', function(e) {
    winston.error(e.message);
  });
  req.end();
};
What do I need to do to ensure that my application still attempts to connect to the page even if it's going to take a few minutes?
Use the request module and set the timeout option to an appropriate value (in milliseconds)
var request = require('request')

var url = 'http://www.google.com' // input your url here

// use a timeout value of 10 seconds
var timeoutInMilliseconds = 10*1000

var opts = {
  url: url,
  timeout: timeoutInMilliseconds
}

request(opts, function (err, res, body) {
  if (err) {
    console.dir(err)
    return
  }
  var statusCode = res.statusCode
  console.log('status code: ' + statusCode)
})
If you don't want to use a higher-level HTTP client like request or superagent, add this:
req.on("socket", function(socket) {
  socket.setTimeout(1000*60*5); // 5 mins
});
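Equivalently, the core http module lets you put the timeout on the request itself; a sketch reusing the hypothetical path, cb and winston names from the question's snippet, with the same 5-minute value:
var req = http.request({
  host: path.hostname,
  path: path.pathname,
  port: 80,
  method: 'GET'
}, function(res) {
  cb.call(this, res);
});

// give slow pages up to 5 minutes before giving up
req.setTimeout(1000 * 60 * 5, function() {
  req.abort(); // aborting surfaces as an 'error' on the request
});

req.on('error', function(e) {
  winston.error(e.message);
});

req.end();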

Trying to test a Node.js Server process using Mocha

Fairly new to Node.js
I made an app that runs a server process and serves files (it does not use express or any frameworks). Now I'm trying to unit test it.
I'm trying to use a mocha test for that... I intended to start my server process and then run requests against it to test the expected results (status code, body content and the like).
However, it's not working properly: all the requests fail to connect to the server... I'm pretty sure the issue is that node just runs one event loop, so either the server is not running "in the background" while the queries run, or possibly the server is not running yet (it starts asynchronously) while the requests are being made?
Anyway, I was wondering what the proper way to test this is. I assume that I either need to have the server run in the background (like a forked process) and/or find a way to wait for the server process to be "up" first, but I'm not sure how.
Or at least recommendations on testing such a server process (with Mocha or something else).
Thanks.
Here is example test code (updated since the original question):
var server = new Server302('./fixture/');
var instance;

describe('Tests', function() {
  before(function(done) {
    instance = http.createServer(function(request, response) {
      console.log(request.url);
      server.serve(request, response);
    }).listen(8000);
    instance.on("listening", function() {
      console.log("started");
      done();
    });
  });

  after(function(done){
    instance.close();
    console.log("stopped");
    done();
  });

  it("Should fetch test.html", function(done) {
    console.log("test1");
    http.get("http://localhost:8000/", function(res) {
      res.on('data', function(body) {
        console.log(body)
        expect(body).toEqual("test");
        done();
      });
    })
  });
});
It seems to execute in order but still fails with a connection error, whereas it works when testing manually with the browser:
started
test1
․․․stopped
✖ 1 of 1 tests failed:
1) Tests Should fetch test.html:
Error: connect ECONNREFUSED
at errnoException (net.js:670:11)
at Object.afterConnect [as oncomplete] (net.js:661:19)
In your before, don't call done until the server has fired the "listening" event.
before(function(done) {
  instance = http.createServer(function(request, response) {
    console.log(request.url);
    server.serve(request, response);
  }).listen(8000);
  instance.on("listening", function() {
    console.log("started");
    done();
  });
});
That should ensure your test connections don't start before the server is ready.
See also the documentation for server.listen
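As an aside, server.listen also accepts a callback that is registered for the 'listening' event, so the before hook can be collapsed a bit (a sketch, same port as above):
before(function(done) {
  instance = http.createServer(function(request, response) {
    server.serve(request, response);
  }).listen(8000, done); // done is called once the server is actually listening
});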
I also had to deal with the body coming in chunks; here is the final version that works, in case it helps somebody else:
var Server302 = require('../lib/server302.js'),
    http = require('http'),
    assert = require("assert");

var server = new Server302('./fixture/');
var instance;

describe('Tests', function() {
  before(function(done) {
    instance = http.createServer(function(request, response) {
      server.serve(request, response);
    }).listen(8100);
    instance.on("listening", function() {
      done();
    });
  });

  after(function(done) {
    instance.close();
    done();
  });

  it("Should fetch test.html", function(done) {
    console.log("test1");
    var body = "";
    http.get({host: "localhost", port: 8100, path: "/"}, function(res) {
      res.on('data', function(chunk) {
        // Note: it might be chunked, so need to read the whole thing.
        body += chunk;
      });
      res.on('end', function() {
        assert.ok(body.toString().indexOf("<a href='/dummy.txt'>") !== -1);
        assert.equal(res.statusCode, 200);
        done();
      });
    })
  });

  it("Should fetch dummy.txt", function(done) {
    http.get({host: "localhost", port: 8100, path: "/dummy.txt"}, function(res) {
      res.on('data', function(body) {
        assert.equal(res.statusCode, 200);
        assert.ok(body.toString().indexOf("test") === 0);
        done();
      });
    });
  });

  it("Should get 404", function(done) {
    http.get({host: "localhost", port: 8100, path: "/qwerty"}, function(res) {
      assert.equal(res.statusCode, 404);
      done();
    });
  });
});
Using SuperTest
Here is a full and straightforward example using SuperTest and Mocha:
var server = new Server302('./fixture/');
var request = require('supertest');

describe('Tests', function() {
  it('Should fetch test.html', function(done) {
    request(server)
      .get('/')
      .expect('test', done);
  });
});
SuperTest allows you to:
Request your server using SuperAgent (much easier to use than the low-level http agent).
Bind your server to an ephemeral port so there is no need to keep track of ports (you can still do it manually if needed).
Use sugary expect methods that work with Mocha (or any other test framework).
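For instance, the same test could also assert the status code; the assertions below are illustrative, so adjust them to whatever Server302 actually serves:
var request = require('supertest');

describe('Tests', function() {
  it('Should fetch test.html with a 200', function(done) {
    request(server)          // the same server instance as in the example above
      .get('/')
      .expect(200)           // status code assertion
      .expect(/test/, done); // the body can be matched against a string or a RegExp
  });
});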

How to make web service calls in Expressjs?

app.get('/', function(req, res){
  var options = {
    host: 'www.google.com'
  };
  http.get(options, function(http_res) {
    http_res.on('data', function (chunk) {
      res.send('BODY: ' + chunk);
    });
    res.end("");
  });
});
I am trying to download the google.com homepage and reprint it, but I get a "Can't use mutable header APIs after sent." error.
Anyone know why, or how to make this http call?
Check out the example here on the node.js doc.
The method http.get is a convenience method; it handles a lot of the basic stuff for a GET request, which usually has no body. Below is a sample of how to make a simple HTTP GET request.
var http = require("http");
var options = {
host: 'www.google.com'
};
http.get(options, function (http_res) {
// initialize the container for our data
var data = "";
// this event fires many times, each time collecting another piece of the response
http_res.on("data", function (chunk) {
// append this chunk to our growing `data` var
data += chunk;
});
// this event fires *one* time, after all the `data` events/chunks have been gathered
http_res.on("end", function () {
// you can use res.send instead of console.log to output via express
console.log(data);
});
});
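Applied to the original express route, that means accumulating the chunks and calling res.send exactly once; a sketch using the route and host from the question:
app.get('/', function (req, res) {
  http.get({ host: 'www.google.com' }, function (http_res) {
    var data = '';
    http_res.on('data', function (chunk) {
      data += chunk; // collect the proxied body
    });
    http_res.on('end', function () {
      res.send('BODY: ' + data); // send the express response only once
    });
  });
});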
