Does Express.js support sending unbuffered progressively flushed responses? - node.js

Perl's Catalyst framework permits you to send a progressively flushed response over an open connection. For instance, you can use write_fh() on Catalyst::Response. I've begun using Node.js, and I can't find how to do the equivalent.
If I want to send a big CSV file (on the order of 200 MB), is there a way to do that without buffering the whole CSV file in memory? Granted, the client will time out if you don't send data within a certain amount of time, so a promise would be nice -- but is there any way to do this?
When I try to do a res.send(text) in a callback, I get
Express
500 Error: This socket has been ended by the other party
And it doesn't seem that Express.js supports an explicit socket.close() or anything of that ilk.
Here is an example:
exports.foo = function (res) {
    var query = client.query("SELECT * FROM naics.codes");
    query.on('row', function(row) {
        //console.log(row);
        res.write("GOT A ROW");
    });
    query.on('end', function() {
        res.end();
        client.end();
    });
};
I would expect that to send "GOT A ROW" for each row, until the call to client.end() signifies completion.

Express is built on the native HTTP module, which means res is an instance of http.ServerResponse, which inherits from the writable stream interface. That means you can do this:
var fs = require('fs');

app.get('/', function(req, res) {
    var stream = fs.createReadStream('./file.csv');
    stream.pipe(res);

    // or, instead of piping, use event handlers:
    // stream.on('data', function(data) {
    //     res.write(data);
    // });
    // stream.on('end', function() {
    //     res.end();
    // });
});
The reason you can't use the res.send() method in Express for streaming is that it calls res.end() automatically for you.
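Applied to the original question, the same idea works for streaming query results row by row. Here is a minimal sketch, assuming the asker's node-postgres-style client and hypothetical code and title columns on naics.codes:

exports.foo = function (req, res) {
    res.setHeader('Content-Type', 'text/csv');
    var query = client.query("SELECT * FROM naics.codes");
    // Each row is flushed to the client as soon as it arrives,
    // so the 200 MB result set is never buffered in memory.
    query.on('row', function (row) {
        res.write(row.code + ',' + row.title + '\n'); // hypothetical columns
    });
    query.on('end', function () {
        res.end();
        client.end();
    });
};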

Related

Wait for an event to happen before sending HTTP response in NodeJS?

I'm looking for a way to wait for an event to happen before sending an HTTP response.
Use Case
The idea is that I call a function in one of my routes: zwave.connect("/dev/ttyACM5"). This function returns immediately.
But there are two events that indicate whether connecting to the device succeeded or failed:
zwave.on('driver ready', function(){...});
zwave.on('driver failed', function(){...});
In my route, I would like to know whether the device succeeded or failed to connect before sending the HTTP response.
My "solution"
When an event happens, I save it in a database:
zwave.on('driver ready', function(){
    // In the database, save the fact that the event happened; here it's the event "CONNECTED"
});
In my route, I execute the connect function and wait for the event to appear in the database:
router.get('/', function(request, response, next) {
    zwave.connect("/dev/ttyACM5");
    waitForEvent("CONNECTED", 5, null, function(){
        response.redirect('/connected');
    });
});
// The function used to wait for the event
function waitForEvent(eventType, nbCallMax, nbCall, callback){
    if(nbCall == null) nbCall = 1;
    if(nbCallMax == null) nbCallMax = 1;
    // Look in the database for the event (returns true if the event happened, false otherwise)
    var event = findEventInDataBase(eventType);
    if(event){
        callback();
    } else if(nbCall < nbCallMax){
        // Poll again in 1.5 seconds, up to nbCallMax attempts
        setTimeout(function(){
            waitForEvent(eventType, nbCallMax, nbCall + 1, callback);
        }, 1500);
    }
}
I don't think this is good practice because it repeatedly polls the database.
So what are your opinions/suggestions about it?
I've gone ahead and added the asynchronous and control-flow tags to your question because at the core of it, that is what you're asking about. (As an aside, if you're not using ES6 you should be able to translate the code below back to ES5.)
TL;DR
There are a lot of ways to handle async control flow in JavaScript (see also: What is the best control flow module for node.js?). You are looking for a structured way to handle it, likely Promises or the Reactive Extensions for JavaScript (a.k.a. RxJS).
Example using a Promise
From MDN:
The Promise object is used for asynchronous computations. A Promise represents a value which may be available now, or in the future, or never.
The async computation in your case is the computation of a boolean value describing the success or failure to connect to the device. To do so, you can wrap the call to connect in a Promise object like so:
const p = new Promise((resolve) => {
    // This assumes that the events are mutually exclusive
    zwave.connect('/dev/ttyACM5');
    zwave.on('driver ready', () => resolve(true));
    zwave.on('driver failed', () => resolve(false));
});
Once you have a Promise representing the state of the connection, you can attach functions to its "future" value:
// Inside your route file
const p = /* ... */;

router.get('/', function(request, response, next) {
    p.then(successful => {
        if (successful) {
            response.redirect('/connected');
        } else {
            response.redirect('/failure');
        }
    });
});
You can learn more about Promises on MDN, or by reading one of many other resources on the topic (e.g. You're Missing the Point of Promises).
Have you tried this? From the look of it, your zwave object has probably already implemented an EventEmitter; you just need to attach a listener to it:
router.get('/', function(request, response, next) {
    zwave.connect("/dev/ttyACM5");
    zwave.once('driver ready', function(){
        response.redirect('/connected');
    });
});
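Note that this only covers the success path. A sketch of a variant (still assuming the zwave emitter from the question) that also listens for the failure event and cleans up the unused listener:

router.get('/', function(request, response, next) {
    function onReady() {
        zwave.removeListener('driver failed', onFailed);
        response.redirect('/connected');
    }
    function onFailed() {
        zwave.removeListener('driver ready', onReady);
        response.redirect('/failure');
    }
    zwave.once('driver ready', onReady);
    zwave.once('driver failed', onFailed);
    zwave.connect("/dev/ttyACM5");
});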
There is also an npm sync module, which is used to synchronize the process of executing queries.
When you want to run parallel queries in a synchronous way, node restricts you because it never waits for a response; the sync module is well suited to that kind of solution.
Sample code
/* require sync module */
var Sync = require('sync');

app.get('/', function(req, res, next){
    story.find().exec(function(err, data){
        var sync_function_data = find_user.sync(null, {name: "sanjeev"});
        res.send({story: data, user: sync_function_data});
    });
});

/***** sync function defined here *******/
function find_user(req_json, callback) {
    process.nextTick(function () {
        users.find(req_json, function (err, data) {
            if (!err) {
                callback(null, data);
            } else {
                callback(null, err);
            }
        });
    });
}
reference link: https://www.npmjs.com/package/sync

Cancelling piping files with the request module throws ESOCKETTIMEDOUT

I'm doing a simple request with express & the request module and piping its response to res:
var pipe = request.get({
    url: 'url-to-file'
});

pipe.on('response', function (response) {
    req.on('close', function () {
        // these methods won't work:
        // pipe.unpipe();
        // pipe.end();
        // pipe.finish();
        // pipe.close();
    });
    pipe.on('end', function () {
        // this will never fire if I cancel the request
    });
    res.writeHead(response.statusCode, response.headers);
    pipe.pipe(res);
});
This works like a charm, except when I cancel a download. The end event won't fire, and a few seconds later an ESOCKETTIMEDOUT error gets thrown.
How can I close the pipe? These node docs claim that I can call .unpipe, but all node gives me is pipe.unpipe is not a function (tested with v0.12.7, 4.2.2 & 5.0.0), probably because it's not an original node stream.
I also tried using events like end, finish and close, but none of them work.
request.get() does not return a pure node.js stream but rather a Request object which inherits from the native Stream class and adds some custom methods. The method you are looking for is Request#abort() (Source link).
Your code example would look like the following:
var pipe = request.get({
    url: 'url-to-file'
});

pipe.on('response', function (response) {
    req.on('close', function () {
        pipe.abort();
    });
    pipe.on('end', function () {
        // this will never fire if I cancel the request
    });
    res.writeHead(response.statusCode, response.headers);
    pipe.pipe(res);
});

node.js: How to sync socket receive in 2 different routes

I am still learning node.js basics. My flow is like this:
browser <--> node <--> backend server doing the calculation
Node and the backend use a socket to communicate. From the browser there are start/stop buttons to ask the backend to start/stop the calculation. When node asks the backend to start or stop, it must first query whether the backend is alive.
My code is like this:
app.get('/stopCmd', function(req, res) {
    socketToBackendServer.write("status", function() {
        console.log("Sending:", 'Node asking for STATUS');
    });
    socketToBackendServer.on("data", function() {
        if(status is ok) // pseudo code
        {
            socketToBackendServer.write("stop", function() {
                console.log("Sending:", 'Node sending STOP');
            });
        } else {
            console.log("backend server is NOT ready");
        }
    });
});
app.get('/startCmd', function(req, res) {
    // do similar things as stopCmd
});
/////////////////////////////////////////////////
var socketToBackendServer = net.connect(2899);

function openSocket() {
    socketToBackendServer.setKeepAlive(true);
    socketToBackendServer.on('connect', onConnect.bind({}, socketToBackendServer));
    socketToBackendServer.on('error', onError.bind({}, socketToBackendServer));
}

function onConnect(socket) {
    var myData;
    console.log('Socket is open!');
    socket.on('data', function(data) {
        console.log('Received:', data);
        io.emit('time', { time: data.toJSON() });
    });
}

function onError(socket) {
    console.log('Socket error!');
    // Kill socket
    clearInterval(interval);
    socket.destroy();
    socket.unref();
    // Re-open socket
    setTimeout(openSocket, 1e3);
}

openSocket();
server.listen(7778);
Using the same browser, if I go crazy clicking start/stop, how do I make sure that when "stopCmd" queries "status", the response is caught by its own handler and not by "startCmd"'s? The problem is this line:
socketToBackendServer.on("data", function()
Thank you again!
You can use multiple connections to the backend server, so one function can freely use one channel, the responses won't mix.
Or you can use a multiplexer function, that you call from both of your functions:
It could work if you can identify your requests: you send an id with the status, for example socketToBackendServer.write("status 1", ...), and the backend server (if it is yours) sends the id back with the status response. This way you can send multiple requests at the same time, and when a response comes you can identify it and call the callback function that you stored with the id.
Alternatively, you only send one request at a time and wait for the response before sending another one. For this you need a waiting queue, where you store the requests and the callback functions. A sketch of the first variant follows.
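Here is a minimal sketch of the id-based multiplexer, assuming a line-oriented protocol where the backend echoes the id back in each response (the message format here is an assumption, not part of your current backend):

var pending = {};   // maps request id -> callback
var nextId = 1;

function sendRequest(command, callback) {
    var id = nextId++;
    pending[id] = callback;
    socketToBackendServer.write(command + " " + id + "\n");
}

// One shared 'data' handler dispatches responses by id,
// so 'startCmd' and 'stopCmd' can never steal each other's replies.
socketToBackendServer.on('data', function (data) {
    var parts = data.toString().trim().split(" "); // e.g. "status-ok 1"
    var callback = pending[parts[1]];
    if (callback) {
        delete pending[parts[1]];
        callback(parts[0]);
    }
});

// Usage from either route:
sendRequest("status", function (reply) {
    if (reply === "status-ok") {
        sendRequest("stop", function () { /* ... */ });
    }
});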

socket.emit issue into post method

I want to send data to the same client (not all clients) with this code:
app.post("/search", function(req, res) {
//some other codes
//if database saved {
io.sockets.emit('preview-post', {title: "blabla" });// io variable was declared as GLOBAL
// } database saved end.
res.send({bla:bla});// response end before database saving process
});
This sample works! But it sends to all clients. How can I emit data to the same opened browser (same client)?
Second question: are there any alternative ways to do this scenario?
My algorithm is: the POST method fires > an async call to an API starts > the response ends and the page loads on the client > the async API call is still running > the async call finishes > send an alert to the client. But how? I wanted to do it with socket.io; if I use the third part of your answer it'll work, but can I do this scenario any other way?
This indeed sends to all sockets.
There are a couple ways to achieve what you are looking to do. The first way is to do something like this on the server:
io.on('connection', function(socket) {
    socket.on('search', function(/* client supplied arguments */){
        socket.emit('preview-post', {title: "blabla" });
    });
});
If you are insistent on using a post request, and then sending it back to the client, there are two ways to achieve this.
The easiest way, if you only need to respond to this one request, is to just send a standard response from node and let the client handle it:
res.send({
    event: 'preview-post',
    payload: {title: "blabla" }
});
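On the client, a sketch of handling that response could look like this (the field names match the payload above; the fetch usage is an assumption about your client code):

fetch('/search', { method: 'POST' })
    .then(function (res) { return res.json(); })
    .then(function (body) {
        if (body.event === 'preview-post') {
            // handle the payload, e.g. show body.payload.title
        }
    });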
This removes socket.io's event system, so if you are insistent on using socket.io to send this event back to the same client, you are going to need to use cookies. Express and the module cookie-parser make this easy for you.
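A sketch of that setup (myUniqueCookie is just a placeholder name; how you generate its value is up to you):

var cookieParser = require('cookie-parser');
app.use(cookieParser());

// Issue the cookie early so it is present both on the
// socket.io handshake and on the later POST request.
app.use(function (req, res, next) {
    if (!req.cookies.myUniqueCookie) {
        res.cookie('myUniqueCookie', Date.now() + '-' + Math.random());
    }
    next();
});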
Once you have this setup, inside your request you could do something like this:
app.post("/search", function(req, res) {
var socket = findSocketByCookie(req.cookies.myUniqueCookie);
socket.emit('preview-post', {title: "blabla" });
});
function findSocketByCookie(cookie) {
for(var i in io.sockets.connected) {
var socket = io.sockets.connected[i];
if(socket.handshake.headers.cookie.indexOf(cookie) !== -1){
return socket;
}
}
}

Node pipe stops working

My client sends an image file to the server. It works 5 times and then suddenly stops. I am pretty new to using streams and pipe, so I am not sure what I am doing wrong.
Server Code
var http = require('http');
var fs = require('fs');
var i = 0;

http.createServer(function(req, res) {
    console.log("File received");
    // This opens up the writable stream to the output file
    var name = "./test" + i + ".jpg";
    var writeStream = fs.createWriteStream(name);
    // This pipes the POST data to the file
    req.pipe(writeStream);
    req.on('end', function () {
        console.log("File saved");
        i++;
    });
    // This is here in case any errors occur
    writeStream.on('error', function (err) {
        console.log(err);
    });
}).listen(3000);
Client Code
var request = require('request');
var fs = require('fs');

setInterval(function () {
    var readStream = fs.createReadStream('./test.jpg');
    readStream.on('open', function () {
        // This pipes the read stream into the POST request (which goes to the server)
        readStream.pipe(request.post('http://192.168.1.100:3000/test'));
        console.log("Sent file to server");
    });
}, 1000);
Behaves like a resource exhaustion issue. Not sure which calls throw errors and which just return. Does the server connect on the 6th call? Does the write stream open? Does the pipe open?
Try ending the connection and closing the pipe after the image is saved. Maybe close the write stream too; I don't remember if node garbage collects file descriptors.
I had to do the following on the server side to make this work:
res.statusCode = 200;
res.end();
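In context, a sketch of the server handler with the response ended after the file is saved; without a reply, the client's request module keeps each connection waiting, which fits the resource-exhaustion symptom described above (the placement inside the 'end' handler is my assumption):

http.createServer(function (req, res) {
    var name = "./test" + i + ".jpg";
    var writeStream = fs.createWriteStream(name);
    req.pipe(writeStream);
    req.on('end', function () {
        console.log("File saved");
        i++;
        // Reply to the POST so the client connection is released.
        res.statusCode = 200;
        res.end();
    });
    writeStream.on('error', function (err) {
        console.log(err);
    });
}).listen(3000);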
