express.js not streaming chunked 'text/event-stream' response - node.js

I'm trying to send an SSE text/event-stream response from an express.js endpoint. My route handler looks like:
function openSSE(req, res) {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream; charset=UTF-8',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
    'Transfer-Encoding': 'chunked'
  });
  // support the polyfill
  if (req.headers['x-requested-with'] === 'XMLHttpRequest') {
    res.xhr = null;
  }
  res.write(':' + Array(2049).join('\t') + '\n'); // 2kb padding for IE
  res.write('id: ' + lastID + '\n');
  res.write('retry: 2000\n');
  res.write('data: cool connection\n\n');
  console.log("connection added");
  connections.push(res);
}
Later I then call:
function sendSSE(res, message) {
  res.write(message);
  if (res.hasOwnProperty('xhr')) {
    clearTimeout(res.xhr);
    res.xhr = setTimeout(function () {
      res.end();
      removeConnection(res);
    }, 250);
  }
}
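For context, the snippets above assume some module-level state that isn't shown in the question. The names lastID, connections, and removeConnection come from the code itself; this reconstruction of them is a guess:

var connections = [];   // open SSE responses, one per connected client
var lastID = 0;         // last event id sent

function removeConnection(res) {
  // drop a closed response from the broadcast list
  var i = connections.indexOf(res);
  if (i !== -1) {
    connections.splice(i, 1);
  }
}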
My browser makes and holds the request:
None of the response gets pushed to the browser and none of my events fire. If I kill the express.js server, the response is suddenly drained and every event hits the browser at once.
If I update my code to add res.end() after the res.write(message) line, it flushes the stream correctly; however, it then falls back to event polling and doesn't stream the response.
I've tried adding padding to the head of the response like
res.write(':' + Array(2049).join('\t') + '\n');
as I've seen from other SO posts that this can trigger a browser to drain the response.
I suspect this is an issue with express.js, because I had previously been using this code with Node's native http server and it was working correctly. So I'm wondering if there is some way to bypass express's wrapping of the response object.

This is the code I have working in my project.
Server side:
router.get('/listen', function (req, res) {
  res.header('transfer-encoding', 'chunked');
  res.set('Content-Type', 'text/json');
  var callback = function (data) {
    console.log('data');
    res.write(JSON.stringify(data));
  };
  // Event listener which calls callback.
  dbdriver.listener.on(name, callback);
  res.socket.on('end', function () {
    // Removes the listener on socket end
    dbdriver.listener.removeListener(name, callback);
  });
});
Client side:
xhr = new XMLHttpRequest();
xhr.open("GET", '/listen', true);
xhr.onprogress = function () {
  // responseText contains ALL the data received
  console.log("PROGRESS:", xhr.responseText)
};
xhr.send();
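Since responseText accumulates everything received so far, a client that only wants the newly arrived bytes has to track an offset itself. A minimal sketch of that (not from the original answer):

var offset = 0;
xhr.onprogress = function () {
  // slice off only what arrived since the last progress event
  var newData = xhr.responseText.substring(offset);
  offset = xhr.responseText.length;
  console.log("NEW CHUNK:", newData);
};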

I was struggling with this one too, so after some browsing and reading I solved this issue by setting an extra header on the response object:
res.writeHead(200, {
  "Content-Type": "text/event-stream",
  "Cache-Control": "no-cache",
  "Content-Encoding": "none"
});
Long story short, when the EventSource is negotiating with the server, it sends an Accept-Encoding: gzip, deflate, br header, which makes express respond with a Content-Encoding: gzip header. So there are two solutions for this issue: the first is to add a Content-Encoding: none header to the response, and the second is to (gzip) compress your response.
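If you go the second route and your app uses the compression middleware, keep in mind that it buffers output; it does expose a res.flush() method you can call after each SSE write to push the compressed bytes out immediately. A rough sketch, assuming a hypothetical /events route:

var express = require('express');
var compression = require('compression');

var app = express();
app.use(compression());

app.get('/events', function (req, res) {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache'
  });
  res.write('data: hello\n\n');
  res.flush(); // added by the compression middleware; forces partial gzip output
});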

Related

How to consume multiple JSONs from an HTTPS keep-alive request?

I have a node web server which listens on:
this.app.use('/register-receiver', (req, res) => {
  const receiverId = req.query.id;
  res.status(200).set({
    connection: 'keep-alive',
    'Cache-Control': 'no-cache',
    'Content-Type': 'application/json',
  });
  this.receivers[receiverId] = res;
});
It periodically sends a JSON payload to clients connected to /register-receiver with
this.receivers[receiverId].write(JSON.stringify({
  // some big json
}))
I have another node program which acts as a client that makes a connection to the server on startup.
const options = {
  agent: false,
  host: <my host>,
  defaultPort: <my port>,
  path: `/register-receiver?id=${activeConfig.receiver.id}`,
  headers: {
    connection: 'keep-alive',
    'Content-Type': 'application/json',
  }
};
http.get(options, res => {
  res.setEncoding('utf8');
  res.on('data', data => {
    try {
      const response = JSON.parse(data);
      // do stuff with response
    } catch(e) {
      console.log(`error ${e} with ${data}`);
    }
  });
  res.on('end', () => console.log(`connection finished`));
})
The server needs to periodically send JSON payloads to the client. The client should receive these JSONs and do something with them. However, the problem is that large JSON writes are chunked, such that the client receives the JSON in pieces. This breaks JSON.parse(data), and then the client doesn't know how to process the server payloads. I can't rely on res.on('end') to detect the completion of a chunked write, because this is a keep-alive request that should stay open forever.
I want to avoid designing my own protocol for combining JSON chunks because of its complexity. It's not as simple as concatenating the strings, since I could have interleaved JSON chunks if the server sends 2 large JSON payloads at the same time.
Is it possible to force the server to write the entire JSON as 1 chunk in the stream? How can I set up my client such that it establishes a "forever" connection with my server and listens for complete JSON payloads?
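This question has no answer in the thread, but one common approach (my suggestion, not from the source) is newline-delimited JSON: the server terminates each document with a newline, and the client buffers bytes and splits on newlines, so TCP chunk boundaries stop mattering. Interleaving is not a problem as long as each write() call carries one complete document, since writes on a single response stream are ordered. A sketch:

// Server side: frame each payload with a trailing newline (NDJSON).
this.receivers[receiverId].write(JSON.stringify(payload) + '\n');

// Client side: buffer incoming data and parse complete lines only.
let buffer = '';
res.on('data', data => {
  buffer += data;
  const lines = buffer.split('\n');
  buffer = lines.pop(); // keep the trailing partial line, if any
  for (const line of lines) {
    if (line.trim()) {
      const response = JSON.parse(line);
      // do stuff with response
    }
  }
});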

How to get multiple bodyResponses to ejs

I'm using a forEach loop to make a request to two different paths, but although it does console.log() both bodyResponses, it gives an error when trying to render "index.ejs" with their values (index.ejs is the template I want to render):
manyPaths.forEach(function (element) {
  var signature = crypto.createHmac('sha256', apiSecret).update(verb + element.path + expires).digest('hex');
  var headers = {
    'content-type': 'application/json',
    'Accept': 'application/json',
    'X-Requested-With': 'XMLHttpRequest',
    'api-expires': expires,
    'api-key': apiKey,
    'api-signature': signature
  };
  const requestOptions = {
    headers: headers,
    url: 'https://testnet.bitmex.com' + element.path,
    method: verb
  };
  request(requestOptions, function (error, response, bodyResponse) {
    if (error) {
      console.log(error);
    } else {
      console.log(bodyResponse);
      bodyResponse = JSON.parse(bodyResponse);
      res.render("index", { bodyResponse: bodyResponse });
    }
  });
});
it does console.log both responses, but I get this error for render():
Error: Can't set headers after they are sent.
You can respond to an HTTP request only ONCE. So in your forEach loop, the first response is sent and the connection is closed, and when it comes to the second iteration it fails with the error you see.
You have to wait for both calls to complete in parallel, and then kick off a function to send the response, as in the sketch below.
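A minimal sketch of that idea, wrapping each request in a Promise and rendering exactly once with both parsed bodies (the bodyResponses array name passed to the template is an assumption):

var promises = manyPaths.map(function (element) {
  return new Promise(function (resolve, reject) {
    // build signature, headers, and requestOptions for element as in the original code
    request(requestOptions, function (error, response, bodyResponse) {
      if (error) reject(error);
      else resolve(JSON.parse(bodyResponse));
    });
  });
});

Promise.all(promises)
  .then(function (bodyResponses) {
    // one render call, after both requests have finished
    res.render("index", { bodyResponses: bodyResponses });
  })
  .catch(function (error) {
    console.log(error);
    res.status(500).send(error.message);
  });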

Setting request headers in Node.js

I have been working with node.js to set up a proxy server that will handle incoming client requests and verify that they have the proper certificates to connect to the server.
What I want to do is to be able to add the client's certificate to their headers to craft a username, which I will then pass on to the server.
function (req, res) {
  // Here is the client certificate in a variable
  var clientCertificate = req.socket.getPeerCertificate();
  // Proxy a web request
  return this.handle_proxy('web', req, res);
};
What I want to be able to do is this: req.setHeader('foo', 'foo')
I know that proxy.on('proxyReq') exists, but the way the code is set up, I need to be able to use the req parameter.
Is there a way to do this?
Please let me know if I need to clarify my question.
You can craft your own http request with the headers provided in the original request, plus any extra headers that you'd like, by using http.request. Just receive the original request, copy the headers into the new request's headers, add the new headers, and send the new request.
var http = require('http');

var data = [];
var options = {
  hostname: 'www.google.com',
  port: 80,
  path: '/upload',
  method: 'POST',
  headers: {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Content-Length': postData.length // postData is your request body string
  }
};
var req = http.request(options, function(res) {
  res.setEncoding('utf8');
  res.on('data', function (chunk) {
    data.push(chunk);
  });
  res.on('end', function() {
    console.log(data.join(""));
    // send the response to your original request
  });
});
req.on('error', function(e) {
  console.log('problem with request: ' + e.message);
});
// Set headers here, e.g. req.setHeader('Content-Type', originalReq.headers['content-type']);
// write data to request body
req.write(/*original request data goes here*/);
req.end();
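To copy everything over rather than header by header, you could seed the options with the incoming request's headers and then overwrite what you need. A sketch under those assumptions; the upstream hostname and the x-client-cert-cn header name are made up for illustration:

function forward(originalReq, clientCertificate) {
  var options = {
    hostname: 'upstream.example.com',   // assumed upstream host
    port: 80,
    path: originalReq.url,
    method: originalReq.method,
    // start from the original headers, then add/override
    headers: Object.assign({}, originalReq.headers, {
      // hypothetical header carrying the username crafted from the certificate
      'x-client-cert-cn': clientCertificate.subject.CN
    })
  };
  return http.request(options);
}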

How to return NodeJS HTTP Request errors without waiting for timeout?

I have an application based on NodeJS/Express and AngularJS which talks to an application server via a REST API. In the event that the application server is not running, I would like to immediately return an error to the AngularJS client that the calls are failing.
Here is what I currently have:
var jsonObject = JSON.stringify(input);
var postHeaders = {
  'Content-Type': 'application/json',
  'Content-Length': Buffer.byteLength(jsonObject, 'utf8')
};
var options = {
  host: '127.0.0.1',
  port: 7777,
  path: path,
  method: method,
  headers: postHeaders
};
var appServerRequest = http.request(options, function(appServerResult) {
  console.log('STATUS: ' + appServerResult.statusCode);
  console.log('HEADERS: ' + JSON.stringify(appServerResult.headers));
  appServerResult.setEncoding('utf8');
  var responseDataString = '';
  appServerResult.on('data', function(chunk) {
    responseDataString += chunk;
    console.log('BODY: ' + chunk);
  });
  appServerResult.on('end', function() {
    callback(responseDataString);
  });
  appServerResult.on('error', function(e) {
    console.log('** Result ERROR in appServerResponse');
    console.log(e);
  });
});
appServerRequest.on('response', function(response) {
  console.log('Response: ' + response);
});
appServerRequest.on('error', function(e) {
  console.log('** Request ERROR in appServerRequest');
  console.log(e);
});
appServerRequest.write(jsonObject);
appServerRequest.end();
As you can see, I'm listening for the 'error' events on both the Request and Response objects. When a call is made and the application server is not running, the Request error handler is called as expected. However, I haven't been able to figure out how to take that error and return it to the client. A response object is eventually returned, but only after the timeout expires. It seems like there should be a way to return a response with an appropriate HTTP status code as soon as I detect the error. I could do it if I had a response object (of course), but I don't get one until the timeout expires.
I know I must be missing something simple, but I can't figure out what it is.
You mention you're using express. Simply call res.send(500) to end the request with an error code (in this case 500). In Express 4+ the equivalent is res.sendStatus(500).
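Applied to the code above, that means responding from inside the 'error' handler of the outgoing request, where res is the Express response for the original client call (the 502 status is my choice, not from the answer):

appServerRequest.on('error', function(e) {
  console.log('** Request ERROR in appServerRequest');
  // Answer the waiting client immediately instead of letting it time out.
  if (!res.headersSent) {
    res.status(502).json({ error: 'application server unavailable' });
  }
});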

NodeJS/ExpressJS send response of large amount of data in 1 stream

I'm prototyping an app using the native mongo REST API, where Node returns about 400K of JSON. I use the following to make the request to mongo's native API and return the result:
http.request(options, function(req) {
  req.on('data', function(data) {
    console.log(data, data.rows);
    response.send(200, data);
  });
})
.on('error', function(error) {
  console.log('error\t', error);
  response.send(500, error);
})
.end();
When I hit http://localhost:8001/api/testdata via curl, the response is proper (both what is outputted to Node's console from the console.log and what is received by curl). But when I hit it via ajax in my app, the stream is… interrupted; even data outputted to Node's console (Terminal) is odd: it has multiple EOFs, and the Network > response for the call in Chrome's dev tools ends at the first EOF.
One other strange thing: data looks like:
{
  "offset": 0,
  "rows": [ … ]
}
but in neither Node nor client-side (angular) can I reference data.rows (it returns undefined). typeof data returns [object Object].
EDIT The request headers for both curl and angular (as reported by Node) are:
req.headers: {
  'x-action': '',
  'x-ns': 'test.headends',
  'content-type': 'text/plain;charset=utf-8',
  connection: 'close',
  'content-length': '419585'
}
EDIT I checked the response headers in both angular and curl directly (instead of from Node), and there's a disagreement (same output from both curl and angular directly instead of from node):
access-control-allow-headers: "Origin, X-Requested-With, Content-Type, Accept"
access-control-allow-methods: "OPTIONS,GET,POST,PUT,DELETE"
access-control-allow-origin: "*"
connection: "keep-alive"
content-length: "65401" // <---------------- too small!
content-type: "application/octet-stream"
// ^-- if i force "application/json"
// with response.json() instead of response.send() in Node,
// the client displays octets (and it takes 8s instead of 0s)
date: "Mon, 15 Jul 2013 18:36:50 GMT"
etag: ""-207110537""
x-powered-by: "Express"
Node's http.request() returns data in chunks for streaming (it would be nice if the docs stated this explicitly). Thus it's necessary to write each chunk to the body of Express's response, listen for the end of the http response (which is not really documented), and then call response.end() to actually finish the response.
var req = http.request(options, function(res) {
  res.on('data', function(chunk) { response.write(chunk); });
  res.on('end', function() { response.end(); });
});
req.on('error', function(error) { … });
req.end();
Where response is Express's response to the initial client request (curl or angular's ajax call).
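The same chunk-forwarding can be expressed with a pipe, which handles backpressure and the end event for you (a simplification on my part, not from the original answer):

var req = http.request(options, function(res) {
  res.pipe(response); // forwards every chunk and ends the Express response
});
req.on('error', function(error) { /* handle the error */ });
req.end();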
Another option, if your data source exposes a readable stream, is to pipe it straight to the response. Note that pipe() should be called once, not inside the 'data' handler, and it ends the response for you:
resp.set('Content-Type', 'application/json');
const stream = db.Country.findAllWithStream();
// Pipe the whole stream to the response; no per-chunk handling needed.
stream.pipe(resp);
stream.on('end', () => {
  console.log('\n\nEND!!!!!');
});
