Stop downloading the data in nodejs request - node.js

How can we stop downloading the remaining response from a server?
For example:
http.get(requestOptions, function (response) {
    // Log the file size
    console.log('File Size:', response.headers['content-length']);
    // Some code to stop downloading the remaining part of the response?
}).on('error', onError);
I just want to log the file size and not waste my bandwidth downloading the remaining file. Does nodejs automatically handle this, or do I have to write some special code for it?

If you just want to fetch the size of the file, it is best to use an HTTP HEAD request, which returns only the response headers from the server, without the body.
You can make a HEAD request in Node.js like this:
var http = require("http"),
// make the request over HTTP HEAD
// which will only return the headers
requestOpts = {
host: "www.google.com",
port: 80,
path: "/images/srpr/logo4w.png",
method: "HEAD"
};
var request = http.request(requestOpts, function (response) {
console.log("Response headers:", response.headers);
console.log("File size:", response.headers["content-length"]);
});
request.on("error", function (err) {
console.log(err);
});
// send the request
request.end();
EDIT:
I realized that I didn't really answer your question, which is essentially "How do I terminate a request early in Node.js?". You can terminate any request in the middle of processing by calling response.destroy():
var request = http.get("http://www.google.com/images/srpr/logo4w.png", function (response) {
    console.log("Response headers:", response.headers);

    // terminate the request early by calling destroy();
    // the data event should fire only once before terminating
    response.destroy();

    response.on("data", function (chunk) {
        console.log("received data chunk:", chunk);
    });
});
You can test this by commenting out the destroy() call and observing that a full request returns two chunks. As mentioned elsewhere, however, it is more efficient to simply use HTTP HEAD.

You need to perform a HEAD request instead of a GET.
Taken from this answer
var http = require('http');

var options = {
    method: 'HEAD',
    host: 'stackoverflow.com',
    port: 80,
    path: '/'
};

var req = http.request(options, function (res) {
    console.log(JSON.stringify(res.headers));
    var fileSize = res.headers['content-length'];
    console.log(fileSize);
});

req.end();

Related

Send new request response with Node HTTP server

Stack Overflow community, greetings. I'm trying to pass the response of a new request back through the original request object using the Node HTTP module for a basic autocomplete search app for my website (i.e. using Node as a proxy that transforms and redirects requests within the server).
The flow basically is:
Client Browser - Node - ElasticSearch - Node - Client Browser
I've started with:
Listen to requests with http.createServer(function (req, res))
Get the body from the req object and use it in a new request with http.request(options, function (newReqResponse))
Get the body from that newReqResponse object and send it back to the client on the res object
The problem is that the content of newReqResponse is always outdated (trails behind the last typed character). i.e.:
If I type "te", the content of newReqResponse corresponds to that if I had typed only "t".
If I type "test", it corresponds to that if I had typed "tes".
And so on.
I've tried to solve it using Node.js streams and using the file system module to write and read files synchronously and asynchronously, but the result is the same. Here's a sample of the whole picture in code:
var http = require('http');
var fs = require('fs');

http.createServer(function (req, res) {
    var reqBody = '';
    var newReqResponseBody = "";

    req.on('data', function (chunk) {
        reqBody += chunk;
        fs.writeFile('reqbody.json', reqBody, function (err) {
            if (err) {
                throw err;
            }
        });

        var options = {
            hostname: '127.0.0.1',
            port: 9500,
            method: 'GET',
            path: '/_search',
            headers: {
                host: 'es',
                'content-length': Buffer.byteLength(reqBody),
                'content-type': 'application/json',
                accept: 'application/json'
            }
        };

        var newReq = http.request(options, function (newReqResponse) {
            newReqResponse.setEncoding("UTF-8");
            newReqResponse.on('data', function (ch) {
                newReqResponseBody += ch;
            });
            newReqResponse.on("end", function () {
                fs.writeFile("newReqResponseBody.json", newReqResponseBody, function (err) {
                    if (err) {
                        throw err;
                    }
                });
            });
        });

        newReq.on("error", function (err) {
            console.log(`problem with request: ${err.message}`);
        });

        newReq.write(reqBody);
        newReq.end();
    });

    req.on('end', function () {
        var responseBody = fs.readFileSync('newReqResponseBody.json', 'utf8');
        console.log(responseBody);
        res.end(responseBody);
    });
}).listen(3000, '127.0.0.1');
Is there a workaround to work with requests and responses within the http server? If there isn't, I'll be very grateful if you give me any directions on how to solve this.
Since the planned use for Node is rather basic, I prefer to stick with core modules rather than having to get new ones from npm, unless it's necessary.
Thanks in advance.
EDIT:
All I had to do was to call res.end(responseBody) within the newReqResponse.on("end") callback, which is totally counterintuitive for me, but... it works.
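Roughly, the change looks like this (a sketch using the variable names from the snippet above):
var newReq = http.request(options, function (newReqResponse) {
    newReqResponse.setEncoding("UTF-8");
    newReqResponse.on('data', function (ch) {
        newReqResponseBody += ch;
    });
    newReqResponse.on("end", function () {
        // respond only once the proxied response is complete,
        // so the client never receives a stale body
        res.end(newReqResponseBody);
    });
});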
Glad you solved your own problem. However, I see room for improvement, especially if you're transferring data, which you can do with streams.
Note that I didn't calculate the content length; you don't need to (and, for this specific case, it would be ignored per the HTTP specification), because streams pass data in chunks with the 'Transfer-Encoding': 'chunked' header.
const fs = require('fs');
const http = require('http');

const options = {
    hostname: '127.0.0.1',
    port: 9500,
    method: 'GET',
    path: '/_search',
    headers: {
        'Content-Type': 'application/json'
    }
};

http.createServer((req, res) => {
    req.pipe(fs.createWriteStream('reqBody.json'));

    let request = http.request(options, (newRes) => {
        newRes.pipe(res);
    });

    fs.createReadStream('reqBody.json').pipe(request);
    res.setHeader('Content-Type', 'application/json');
}).listen(3000, '127.0.0.1');
You can shorten this snippet further if you don't need the data saved to a file and only want to pipe the req stream into request.
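A shortened sketch along those lines (reusing the options object above):
http.createServer((req, res) => {
    res.setHeader('Content-Type', 'application/json');
    // stream the incoming body to Elasticsearch and stream its
    // response straight back to the client
    req.pipe(http.request(options, (newRes) => newRes.pipe(res)));
}).listen(3000, '127.0.0.1');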

DELETE request to REST API returns 500

function delete(id, response) {
    var https = require('https');
    var linkpath = "/v1/endpoint/" + id + "/?token=" + AUTH_KEY;

    var req = https.request({
        hostname: 'api.foo.com',
        port: 443,
        path: linkpath,
        agent: false,
        method: 'DELETE',
    }, (res) => {
        if (res.statusCode !== 200) {
            response.send('HTTP ' + res.statusCode + ' ' + res.statusMessage);
        }
        res.on('error', function (err) {
            response.send(err);
        });
        res.on('end', function (data) {
            response.send(data);
        });
    });

    req.on('error', function (e) {
        response.send(e.message);
    });

    req.end();
}
This code, adapted from my (working) code that uses a POST request to do other things with this API, nets me a status code of 500 from the endpoint.
I don't know how to debug this. I can't send the URL manually to the server because it's a DELETE operation instead of a GET or POST.
Has anyone seen this problem? Or do you have ideas on how to debug it?
Postman (https://www.getpostman.com/) is a great tool for manually sending specific HTTP requests, including DELETE!
There are all sorts of tools that will let you manually send any HTTP request to the server. For instance, you can get quite a bit of information with curl, which will happily send a DELETE request.
For example:
curl -v -X "DELETE" https://jsonplaceholder.typicode.com/posts/1
will show the request and response headers as well as the response body, if any.
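Applied to the endpoint in the question (hostname, id and token are placeholders taken from the snippet above), that would look something like:
curl -v -X DELETE "https://api.foo.com/v1/endpoint/<id>/?token=<AUTH_KEY>"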

Node.js Express application taking too much time to process outbound HTTP requests under load testing

I have an express (node.js v0.10.29) proxy on a linux machine (Ubuntu 12.04 64bit, 3.75GB Memory, 1 Core) which is making some outbound HTTP requests for each incoming request.
While load testing I found that the response time becomes very slow (around 30 seconds for a request that makes 4 outbound requests, when firing 1000 requests).
After some investigation I made sure that the outbound requests are the bottleneck and ruled out machine limits (CPU and memory stay below 20%, and I increased the number of open files to 10000).
First I was using the request module for the outbound requests, then tried switching to the http module. For both of them I tried increasing globalAgent.maxSockets, using agent: false, using my own agent with various maxSockets values, setting request.setNoDelay(true), and using cluster, but nothing changed the results of my load testing.
What can be the problem?
Here is my latest code for the HTTP request:
var http = require('http');
var url = require('url');
var zlib = require('zlib');

var agent = new http.Agent();
agent.maxSockets = 100;

var doPost = function (reqUrl, body, next, onError) {
    var stringBody = JSON.stringify(body);
    var headers = {
        'Content-Type': 'application/json',
        'Accept-Encoding': 'gzip',
        'Content-Length': stringBody.length
    };
    var parsedUrl = url.parse(reqUrl);
    var options = {
        host: parsedUrl.host,
        path: parsedUrl.path,
        method: 'POST',
        headers: headers,
        //agent: false
        agent: agent
    };
    doHttpRequest(options, stringBody, next, onError);
};

function doHttpRequest(options, body, next, onError, HttpContext) {
    var req = http.request(options, function (res) {
        var chunks = [];
        res.on('data', function (chunk) {
            chunks.push(chunk);
        });
        res.on('end', function () {
            var buffer = Buffer.concat(chunks);
            var encoding = res.headers['content-encoding'];
            if (encoding == 'gzip') {
                zlib.gunzip(buffer, function (error, decoded) {
                    var jsonRes = JSON.parse(decoded && decoded.toString());
                    next(jsonRes);
                });
            } else if (encoding == 'deflate') {
                zlib.inflate(buffer, function (error, decoded) {
                    var jsonRes = JSON.parse(decoded && decoded.toString());
                    next(jsonRes);
                });
            } else {
                next(null, buffer.toString());
            }
        });
    });

    req.setNoDelay(true);
    req.write(body);
    req.end();

    req.on('error', function (e) {
        log(e);
    });
}
the "next" method will call the "doPost" function a few times (in this case 4 times).
I am seeing the same behaviour. I have a simple proxy module; when I call an endpoint directly it takes 50ms, but via the proxy server (which internally makes another request and pipes it to the original one) it takes double, ~100ms. I have also tried all the options you mentioned here.
I am glad I am not alone in having this issue and will be digging into it further. I'll let you know if I find something.
UPDATE
After setting keepAlive: true on the agent, proxied requests took only ~5ms more than a direct call.
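For reference, a minimal sketch of that agent setup, assuming a Node version where http.Agent supports the keepAlive option (it is not available in 0.10.x):
var http = require('http');

// reuse TCP connections across requests instead of opening
// a new one for every outbound call
var agent = new http.Agent({
    keepAlive: true,
    maxSockets: 100
});

// then pass it in the request options, e.g. { agent: agent, ... }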

How to return NodeJS HTTP Request errors without waiting for timeout?

I have an application based on NodeJS/Express and AngularJS which talks to an application server via a REST API. In the event that the application server is not running, I would like to immediately return an error to the AngularJS client indicating that the calls are failing.
Here is what I currently have:
var jsonObject = JSON.stringify(input);

var postHeaders = {
    'Content-Type': 'application/json',
    'Content-Length': Buffer.byteLength(jsonObject, 'utf8')
};

var options = {
    host: '127.0.0.1',
    port: 7777,
    path: path,
    method: method,
    headers: postHeaders
};

var appServerRequest = http.request(options, function (appServerResult) {
    console.log('STATUS: ' + appServerResult.statusCode);
    console.log('HEADERS: ' + JSON.stringify(appServerResult.headers));
    appServerResult.setEncoding('utf8');

    var responseDataString = '';
    appServerResult.on('data', function (chunk) {
        responseDataString += chunk;
        console.log('BODY: ' + chunk);
    });
    appServerResult.on('end', function () {
        callback(responseDataString);
    });
    appServerResult.on('error', function (e) {
        console.log('** Result ERROR in appServerResponse');
        console.log(e);
    });
});

appServerRequest.on('response', function (response) {
    console.log('Response: ' + response);
});

appServerRequest.on('error', function (e) {
    console.log('** Request ERROR in appServerRequest');
    console.log(e);
});

appServerRequest.write(jsonObject);
appServerRequest.end();
As you can see, I'm listening to the 'error' events on both the Request and Response objects. When a call is made and the application server is not running, the Request error handler is called as expected. However, I haven't been able to figure out how to take that error and return it to the client. A response object is eventually returned, but only after the timeout expires. It seems like there should be a way to return a Response and specify an appropriate HTTP Status code as soon as I detect the error. I could do it if I had a response object (of course), but I don't get one until the timeout expires.
I know I must be missing something simple, but I can't figure out what it is.
You mention you're using Express. Simply call res.send(500) to end the request with an error code (in this case 500).
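Applied to the snippet in the question, that could look something like the sketch below, assuming the Express res object for the original client request is in scope (it isn't shown in the snippet above):
appServerRequest.on('error', function (e) {
    console.log('** Request ERROR in appServerRequest');
    console.log(e);
    // fail fast instead of waiting for the timeout
    // (on Express 4+, prefer res.sendStatus(500))
    res.send(500);
});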

408 Timeout in NodeJS app requesting Github API

I'm following the documentation of the GitHub API to create an authorization for a NodeJS app.
I have the following code:
var _options = {
    headers: {
        'User-Agent': app.get('ORGANISATION')
    },
    hostname: 'api.github.com'
};

var oauth2Authorize = function () {
    var path = '/authorizations?scopes=repo';
    path += '&client_id=' + app.get('GITHUB_CLIENT_ID');
    path += '&client_secret=' + app.get('GITHUB_CLIENT_SECRET');
    path += '&note=ReviewerAssistant';

    _options.path = path;
    _options.method = 'POST';

    var request = https.request(_options, function (response) {
        var data = "";
        response.on('data', function (chunk) {
            data += chunk;
        });
        response.on('end', function () {
            console.log(data);
        });
    });

    request.on('error', function (error) {
        console.log('Problem with request: ' + error);
    });
};
And all I get is:
408 Request Time-out
Your browser didn't send a complete request in time.
Doing a GET request works though.
http.request() doesn't immediately send the request:
With http.request() one must always call req.end() to signify that you're done with the request - even if there is no data being written to the request body.
It opens the underlying connection to the server, but leaves the request incomplete so that a body/message can be sent with it:
var request = http.request({ method: 'POST', ... });
request.write('data\n');
request.write('data\n');
request.end();
And, regardless of whether there's anything to write() or not, you must call end() to complete the request and send it in its entirety. Without that, the server will eventually force the open connection to close, in this case with a 408 response.
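Applied to the code in the question, the fix is simply to finish the request (a sketch of the relevant part):
var request = https.request(_options, function (response) {
    // ... handle the response as above ...
});

request.on('error', function (error) {
    console.log('Problem with request: ' + error);
});

// without this the request is never completed, and the server
// eventually closes the connection with a 408
request.end();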
