In my browser I display a lot of thumbnails. If a thumb is not visible (outside the viewport), I set its src to '' (empty string), and when it comes into the viewport I set the correct src.
If the user grabs the scrollbar, the downloads of the thumbs that have not finished yet are canceled by the browser.
But on the server side, some of those requests become 'zombies' and can fill the socket pool (maxSockets). After the timeout, the server kills those sockets and the pile of un-downloaded thumbs starts over.
So I need to detect that a request has been canceled by the browser in order to end the response. I tried many events, without result.
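For context, the client-side swap is roughly this (a simplified sketch; the .thumb selector and data-src attribute are illustrative, not my actual markup):
// simplified sketch of the swap; selector and data attribute are assumptions
function updateThumbs() {
  document.querySelectorAll('img.thumb').forEach(function (img) {
    var rect = img.getBoundingClientRect();
    var inViewport = rect.bottom > 0 && rect.top < window.innerHeight;
    if (inViewport && !img.getAttribute('src')) {
      img.setAttribute('src', img.getAttribute('data-src')); // start the download
    } else if (!inViewport && img.getAttribute('src')) {
      img.setAttribute('src', ''); // the browser cancels the pending request
    }
  });
}
window.addEventListener('scroll', updateThumbs);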
function(req, res) {
  var stream;
  stream = getThumb(req.file.binary.thumb);
  req.on('close', function() {
    return console.log("reQ.on close");
  });
  req.connection.on('close', function() {
    return console.log('reQ.connection.on close');
  });
  req.on('end', function() {
    return console.log('reQ.on end');
  });
  res.on('close', function() {
    return console.log("reS.on close");
  });
  res.connection.on('close', function() {
    return console.log('reS.connection.on close');
  });
  res.on('end', function() {
    return console.log('reS.on end');
  });
  stream.on('close', function() {
    return console.log('stream.on close');
  });
  return stream.pipe(res);
};
What is the event to listen to?
Note: I am using Express, but it should make no difference.
Cheers!
I had a similar problem; the answer to this is:
req.connection.on('close', function() {
  stream.destroy();
});
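In the context of the handler above, that would look roughly like this (a sketch; getThumb is assumed to return a readable stream, as in the question):
function (req, res) {
  var stream = getThumb(req.file.binary.thumb);

  // if the browser aborts the request, stop reading the thumb so the
  // socket is released right away instead of lingering as a 'zombie'
  req.connection.on('close', function () {
    stream.destroy();
  });

  stream.pipe(res);
}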
Related
I'm tailing a log file and streaming the new lines to a websocket.
Since I have multiple logs, I let the user choose the log file and then fetch the details of that log.
The problem is that when I close the connection in order to view a different log, something odd happens: when I start it again, it streams the data twice. If I close the connection and re-open it once more, it streams the data three times, and so on.
The relevant dependencies from my package.json:
{
  "socket.io": "^2.0.3",
  "socket.io-client": "^2.0.3"
}
Client side
$("#detailsBtn").click(function (e) {
e.preventDefault();
$.get('/get/details', {
// some-data
}, () => {
if (socket) socket.close();
socket = io('http://localhost:4000', {forceNew: true});
socket.on('connect', () => {
console.log('connected');
});
socket.on('newLine', function (msg) {
// do-stuff
});
});
});
$('#closeBtn').click(function () {
  socket.disconnect();
  socket.close();
});
Server side
app.get('/details', (req, res) => {
  const tail = spawn('ssh', ['root@' + req.query.srv, req.query.script]);
  io.on('connection', function (socket) {
    console.log(`connect ${socket.id}`);
    socket.on('disconnect', () => {
      console.log(`DISconnected ${socket.id}`);
    });
    tail.stdout.on('data', function (data) {
      socket.emit('newLine', {message: data});
    });
  });
  return res.sendStatus(200);
});
Now, when the button is clicked again, I expect the existing socket and connection to be closed so that a new one can be made.
Server console log (each time I click the button only once)
Server listening on localhost:4000
**click on detailsBtn**
GET /get/details?srv=myserver.google.com&script=%2Fusr%2Fbin%2Ftail 304 16.003 ms - -
connect YyYHFI9CARpBHaxoAAAB
**click on closeBtn**
DISconnected YyYHFI9CARpBHaxoAAAB
**click on detailsBtn**
GET /get/details?srv=myserver.google.com&script=%2Fusr%2Fbin%2Ftail 304 6.308 ms - -
connect vzfBnUPHUqYXd5qaAAAC
connect vzfBnUPHUqYXd5qaAAAC
**click on closeBtn**
DISconnected vzfBnUPHUqYXd5qaAAAC
DISconnected vzfBnUPHUqYXd5qaAAAC
**click on detailsBtn**
GET /get/details?srv=myserver.google.com&script=%2Fusr%2Fbin%2Ftail 304 4.677 ms - -
connect 3quEe5G1gFDJ2BvrAAAD
connect 3quEe5G1gFDJ2BvrAAAD
connect 3quEe5G1gFDJ2BvrAAAD
**click on closeBtn**
DISconnected 3quEe5G1gFDJ2BvrAAAD
DISconnected 3quEe5G1gFDJ2BvrAAAD
DISconnected 3quEe5G1gFDJ2BvrAAAD
What am I doing wrong?
As you can see in the console logs, the connect and disconnect lines show the same socket ID. This indicates that the event handler is registered multiple times.
In your code you register a new 'connection' event handler every time the '/details' route receives a request.
So a better approach would be...
io.on('connection', function (socket) {
  console.log(`connect ${socket.id}`);
  socket.on('disconnect', () => {
    console.log(`DISconnected ${socket.id}`);
  });
  tail.stdout.on('data', function (data) {
    socket.emit('newLine', {message: data});
  });
});
app.get('/details', (req, res) => {
  const tail = spawn('ssh', ['root@' + req.query.srv, req.query.script]);
  return res.sendStatus(200);
});
So the comments here directed me to the solution.
I had duplicate event handlers for both the socket and the tail.
I initiated the connection each time the user clicked the button, and I spawned a tail child process each time the URL was accessed.
Here is how I fixed it:
Socket
1. Moved the io.on('connection'...) outside of the app.get handler, as suggested by @alex-rokabilis.
2. Created an event emitter of my own:
const events = require('events');
const eventEmitter = new events.EventEmitter();
3. Inside io.on('connection'...), instead of listening to the tail.stdout event, I listen to my own eventEmitter event so that the tail can be used outside of the app.get handler:
io.on('connection', function (socket) {
  eventEmitter.on('newLine', (data) => {
    socket.emit('newLine', {line: data});
  });
});
4. In the app.get handler, I listen to tail.stdout.on('data'...) and emit an eventEmitter event that is handled inside the io object:
app.get('/details', (req, res) => {
  let tail = spawn('ssh', ['root@' + req.query.srv, req.query.script]);
  tail.stdout.on('data', (data) => {
    eventEmitter.emit('newLine', data.toString().replace(/\n/g, '<br />'));
  });
});
5. On the client, I moved the io initialization outside of the ajax call and defined socket so that I can use it further down in the script.
let socket = io('http://localhost:4000', {forceNew: true});
socket.on('connect', () => {
  console.log('connected');
});
socket.on('newLine', function (msg) {
  // do-stuff
});
$("#detailsBtn").click(function (e) {
  e.preventDefault();
  $.get('/get/details', {
    // some-data
  });
});
Tail
This was a bit hard to find; I kept thinking the problem was with socket.io rather than with the tail.
Inside io.on('connection'..., I added a socket listener for an event named closeConnection that emits closeConnection to my eventEmitter, which in turn kills the tail child process:
io.on('connection', function (socket) {
  eventEmitter.on('newLine', (data) => {
    socket.emit('newLine', {line: data});
  });
  socket.on('closeConnection', () => {
    console.log('got connection close from client');
    eventEmitter.emit('closeConnection');
  });
});
And inside the app.get controller:
app.get('/details', (req, res) => {
  let tail = spawn('ssh', ['root@' + req.query.srv, req.query.script]);
  tail.stdout.on('data', (data) => {
    eventEmitter.emit('newLine', data.toString().replace(/\n/g, '<br />'));
  });
  eventEmitter.on('closeConnection', () => {
    tail.stdin.pause();
    tail.kill();
  });
});
And in the client, each time I want to close the connection, I just:
socket.emit('closeConnection');
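In context, the close button handler ends up as simple as this (a sketch; #closeBtn is the same button as in the original snippet, and the persistent socket from step 5 stays open for reuse):
$('#closeBtn').click(function () {
  // ask the server to kill the tail; the socket itself stays connected
  socket.emit('closeConnection');
});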
That was tough.
I'm piping an HTTPS request to a file. It works for 99.9% of the calls, but occasionally (maybe when the server or network is unavailable) it hangs indefinitely...
This obviously causes my application to stop working and requires a manual restart...
I have other HTTPS connections that used to hang occasionally and now always complete, using the following error-handling code on the request object, as suggested in the Node documentation:
request.on('socket', function(socket) {
  socket.setTimeout(10000);
  socket.on('timeout', function() { request.abort(); });
});
request.on('error', function(e) {
  // Handle the error...
  console.error("FAILED!");
});
... but it seems that timeouts on the request are ignored if the destination is piped to a file stream. Maybe I should handle an error with a timeout on the file stream object, but the documentation is not clear about which event I have to wait for, apart from 'finish'...
Here is the sample code; I hope someone can help me:
var https = require('https'),
    fs = require('fs');

var opts = {
  host: 'www.google.com',
  path: '/',
  method: 'GET',
  port: 443
};

var file = fs.createWriteStream('test.html');

var request = https.request(opts, function(response) {
  response.pipe(file);
  file.on('finish', function() {
    file.close(function(){
      console.log("OK!");
    });
  });
});

request.on('socket', function(socket) {
  socket.setTimeout(10000);
  socket.on('timeout', function() { request.abort(); });
});

request.on('error', function(e) {
  console.error("FAILED!");
});

request.end();
If you want to reproduce the hang, change the host and path to a huge file and disconnect the network cable during the transfer; it should time out after 10 seconds, but it doesn't...
I set up a demo Node.js HTTP server that sends a very slow answer, and a client similar to your sample code.
When I start the client and then stop the server while it is sending the response, I also don't get a timeout event on the socket, but I do get an 'end' event on the response within the client:
var request = https.request(opts, function(response) {
  response.pipe(file);
  file.on('finish', function() {
    file.close(function(){
      console.log("OK!");
    });
  });
  response.on('end', function() {
    // this is printed when I stop the server
    console.log("response ended");
  });
});
Maybe you could listen to that event?
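Building on that, you could also use it to clean up the partially written file (a sketch; it assumes the 'end' event really fires in this situation, as in my test, and uses response.complete to tell a truncated transfer from a finished one; fs and file are the variables from the sample code above):
response.on('end', function () {
  if (!response.complete) {
    // the connection dropped before the full body arrived
    console.error('response ended prematurely');
    file.close(function () {
      fs.unlink('test.html', function () {}); // discard the partial file
    });
  }
});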
I am trying to do a very simple real-time notification with socket.io. For some reason I can't receive data or fire the event from server to client, but from client to server it works. Let me show my code:
Client Side
ngOnInit() {
  this.socket.on('connect', function (res: any) {
    console.log('Socket.io is connected on client side!'); // it shows on client console
  });
  this.socket.on('alarmsreceived', function (res: any) {
    console.log(res + ' i am here now'); // is not firing
  });
}

// this method fires from a click button
objectStatus = () => {
  this.socket.emit('alarmsystem', 'i am client going to server');
}
Server
var io = require('socket.io').listen(server);
var connections = [];

io.of('/api/v1/monitoring').on('connect', function(socket){
  connections.push(socket);
  console.log('Connected %s sockets', connections.length); // i see connection on cmd
  socket.on('disconnect', function() {
    connections.splice(connections.indexOf(socket), 1);
    console.log('Connected %s sockets', connections.length);
  });
  socket.on('alarmsystem', function(res) {
    console.log(res); // this shows me the message from client
    io.sockets.emit('alarmsreceived', 'I am server heading to client');
  });
})
It seems pretty straightforward, but the client event is not firing. Can someone help me figure out what I am doing wrong here? Thanks in advance.
I have a NodeJS app with Socket.IO integration. My web page and app have both been implemented, but I am facing one issue during execution:
Below is my web page code:
<script>
  $(document).ready(function(){
    $("#batch")[0].reset();
    var socket = io.connect('http://xx.xx.xxx.xx:xxxx', {'forceNew': true });
    socket.on('message', function (data) {
      var newtext = data;
      document.batch.outputtext.value += newtext;
    });
    socket.on('end', function (data) {
      socket.disconnect();
    });
  });
</script>
And my NodeJS App:
exec_script = function(resp) {
  socket.on('connection', function (client) {
    console.log('Connection Established');
    client.on('disconnect', function () {
      console.log('disconnected');
      return;
    });
    var pyshell = new PythonShell('./test.py', options = { mode: 'text', pythonOptions: ['-u'] });
    pyshell.stdout.on('data', function(data) {
      client.emit('message', data);
      console.log(data);
    });
    pyshell.end(function(err) {
      if (err) throw err;
      console.log('End Script');
      client.emit('end', 'end');
      client.disconnect();
    });
  });
};
The issue I am facing: while the Python script executes, its output is sent to the browser and the browser status says "Waiting for xx.xx.xxx.xx", with Firefox showing the spinning blue circle, which is fine. But even after the Python script has ended and the socket has been disconnected explicitly, the browser status still says "Waiting for xx.xx.xxx.xx", the title says Connecting and the blue circle keeps spinning.
How can I close and end the connection successfully, given that I need to keep the same page in the browser, i.e. I do not want to navigate the user to some other page?
I tried response.end(), but the issue is that if the data was posted via the URL form /today, calling response.end() changes the URL on the browser side to http://xx.xx.xxx.xx:xxxx/today, leading to a blank/error page, which is not what I want; the URL should remain http://xx.xx.xxx.xx:xxxx.
Below is how I call my exec_script method:
router.post('/', function(req, res) {
  methods.process(req, res);
});

exports.process = function(req, resp) {
  var bname = req.body['date'];
  if (typeof req.body['date'] !== "undefined" && req.body['date'] !== null) {
    exec_script(req, resp);
  }
};
I built a simple TCP server and a simple TCP client in Node.js.
Now, when the client sends "exit" to the server, the connection is successfully closed. The server deletes the socket from its sockets list and sends "Bye bye!" to the client.
The connection on the client is closed as well, but the app is still waiting for other input, so it doesn't die and I'm forced to press CTRL+C.
I tried adding process.exit() after the connection closes, but it doesn't work:
CLIENT CODE:
var net = require('net'),
    config = require(__dirname + '/config.json'),
    connection = net.createConnection(config.port, config.host);

connection.setEncoding('utf8');

connection.on('connect', function () {
  console.log('Connected');
});

connection.on('error', function (err) {
  console.error(err);
});

connection.on('data', function (data) {
  console.log('» ' + data);
});

connection.on('close', function() {
  console.log('Connection closed');
});

process.stdin.on('data', function (data) {
  if ((new String(data)).toLowerCase() === 'exit') {
    connection.end();
    process.exit();
  }
  else {
    connection.write(data);
  }
});

process.stdin.resume();
SERVER CODE:
var server = require('net').createServer(),
    config = require(__dirname + '/config.json'),
    sockets = [];

server.on('connection', function (socket) {
  socket.setEncoding('UTF-8');
  socket.on('data', function (data) {
    console.log('Received data: ' + data);
    if (data.trim().toLowerCase() === 'exit') {
      socket.write("Bye bye!\n");
      socket.end();
    }
    else {
      sockets.forEach(function (client) {
        if (client && client != socket) {
          client.write(data);
        }
      });
    }
  });
  socket.on('close', function () {
    console.log('Connection closed');
    sockets.splice(sockets.indexOf(socket), 1);
    console.info('Sockets connected: ' + sockets.length);
  });
  sockets.push(socket);
});

server.on('listening', function () {
  console.log('Server listening');
});

server.on('close', function () {
  console.log('Server is now closed');
});

server.on('error', function (err) {
  console.log('error:', err);
});

server.listen(config.port);
EDIT:
I added a client connection "on close" event handler. So, the string "Connection closed" is now printed by the server and by the client too.
I think you're looking for this: socket.unref().
From Node.js documentation (https://nodejs.org/api/net.html#net_socket_unref):
socket.unref()
Calling unref on a socket will allow the program to exit if this is the only active socket in the event system. If the socket is already unrefd calling unref again will have no effect.
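Applied to the client from the question, that would be (a sketch; connection is the socket returned by net.createConnection):
var connection = net.createConnection(config.port, config.host);
// don't let this socket alone keep the event loop alive
connection.unref();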
Some time ago, when improving the test suite for the node-cubrid module, I encountered the same problem. After all tests had passed, the nodeunit process didn't quit, because node-cubrid was using connection.end() to close the client socket when a timeout occurred, just like you do.
Then I replaced connection.end() with connection.destroy(), a cleaner way to ensure the socket is really closed without terminating the running process, which, I think, is a better solution than the process.exit() suggested above. So, in your client code context, I would do:
process.stdin.on('data', function (data) {
  if ((new String(data)).toLowerCase() === 'exit') {
    connection.destroy();
  }
  else {
    connection.write(data);
  }
});
According to Node.js documentation:
socket.destroy()
Ensures that no more I/O activity happens on this socket. Only necessary in case of errors (parse error or so).
I doubt that if ((new String(data)).toLowerCase() === 'exit') ever succeeds, because data most likely has a trailing newline (in your server you trim() before doing the comparison, but not in the client).
If that's fixed, you've got a logic problem: when you get "exit" you close the connection without sending "exit" to the server, so the server code that looks for "exit" will never execute.
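Put together, the stdin handler could look like this (a sketch; only the comparison and the forwarding of "exit" change):
process.stdin.on('data', function (data) {
  var line = data.toString().trim().toLowerCase();
  // always forward the line, so the server also sees "exit" and can reply
  connection.write(data);
  if (line === 'exit') {
    connection.end(); // half-close; the server's "Bye bye!" still arrives
  }
});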
You have to put the process.exit() instruction only on the last event handler. So, in this case you have to put it inside the client connection "on close" event handler:
CLIENT:
connection.on('close', function() {
  console.log('Connection closed');
  process.exit();
});
Try the 'close' event in the server:
http://nodejs.org/api/net.html#net_event_close