Node setInterval in prototype

Can anybody explain to me why setInterval doesn't work inside a prototype method?
I'm using Node v4.8.2.
function timers(){
  console.log("new timer");
}

timers.prototype.start = function(){
  console.log("starting timer with interval: 1000");
  setInterval(function(){ console.log("aa"); }, 1000);
};

timer = new timers();
timer.start();

while (true) {
}
The output is only:
new timer
starting timer with interval: 1000

JavaScript isn't multi-threaded. Your while(true) {} blocks the event loop, so the setInterval callback never gets a chance to run. Take it out and see what happens.
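For illustration, a minimal sketch of the same code with the busy-wait removed; the pending interval itself keeps the Node process alive, so nothing else is needed:

function timers(){
  console.log("new timer");
}

timers.prototype.start = function(){
  console.log("starting timer with interval: 1000");
  // The interval keeps the event loop busy; no while(true) is needed.
  setInterval(function(){ console.log("aa"); }, 1000);
};

var timer = new timers();
timer.start();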

Related

NodeJS child process kill

Is there a way of ensuring that a child process has been killed?
I currently have the following code:
const child_process = require('child_process');

let timeout = false;
let p = child_process.spawn(app, args);
...
try {
  p.kill('SIGKILL');
} catch (e) {
  console.error("Killing process exception:", e);
}

let job = setInterval(() => {
  if (p.killed || timeout === true) {
    clearInterval(job);
    callback();
  }
}, 100);

setTimeout(() => {
  console.log("Killing process timeout!");
  timeout = true;
}, 1000);
I check periodically (every 100 ms) whether the kill signal has been properly sent to the process and, at that moment, I assume the process has been killed; but, to ensure that the process is not locked, I set a timeout of 1 second.
The timeout fires many times, regardless of whether I wait 1 second or 10 seconds.
The code is executed on Linux; when running in WSL, everything seems to work properly.
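A minimal sketch of an alternative that avoids polling: the ChildProcess object emits an 'exit' event once the process has actually terminated, so you can wait for that event (with a fallback timer) instead of checking p.killed. The names app, args and callback are placeholders carried over from the question:

const child_process = require('child_process');

let p = child_process.spawn(app, args);
let fallback;

// Fires once the OS reports that the process has actually terminated.
p.once('exit', (code, signal) => {
  clearTimeout(fallback);
  console.log("Process exited with code", code, "and signal", signal);
  callback();
});

// Fallback in case the exit event never arrives.
fallback = setTimeout(() => {
  p.removeAllListeners('exit');
  console.log("Killing process timeout!");
  callback();
}, 1000);

try {
  p.kill('SIGKILL');
} catch (e) {
  console.error("Killing process exception:", e);
}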

How can I handle pm2 cron jobs that run longer than the cron interval?

I have a cron job running on pm2 that sends notifications on a 5 second interval. Although it should never happen, I'm concerned that the script will take longer than 5 seconds to run. Basically, if the previous run takes 6 seconds, I don't want to start the next run until the first one finishes. Is there a way to handle this solely in pm2? Everything I've found says to use shell scripting to handle it, but it's not nearly as easy to replicate and move to new servers when needed.
As of now, I have the cron job just running in a never ending while loop (unless there's an error) that waits up to 5 seconds at the end. If it errors, it exits and reports the error, then restarts because it's running via pm2. I'm not too excited about this implementation though. Are there other options?
Edit for clarification of my current logic:
function runScript() {
  while (!err) {
    // do stuff
    wait(5 seconds - however long 'do stuff' took) // if 'do stuff' took 1 second, it waits 4 seconds
  }
}
runScript()
This feels like a hacky way to get around the cron limits of pm2. It's possible that I'm just being paranoid... I just wanna make sure I'm not using antipatterns.
What do you mean you have the cron job running in a while loop? PM2 is starting a node process which contains a never-ending while loop that waits 5 seconds? Your implementation of a cron seems off to me, maybe you could provide more details.
Instead of a cron, I would use something like the setTimeout method. Run your script using PM2, and in the script have a method like this:
function sendMsg() {
  // do the work
  setTimeout(sendMsg, 5000); // call sendMsg again after waiting 5 seconds
}
sendMsg();
By doing it this way, your sendMsg function can take all the time it needs to run, and the next call will start 5 seconds after that. PM2 will restart your application if it crashes.
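If "do the work" is itself asynchronous (a callback or a promise), a variation on the same idea is to schedule the next call only once the work has finished. A minimal sketch, assuming a hypothetical promise-returning doWork():

// Hypothetical async unit of work; replace with your real notification logic.
function doWork() {
  return new Promise((resolve) => setTimeout(resolve, 1000));
}

function sendMsg() {
  doWork()
    .catch((err) => console.error(err))      // report errors but keep the loop alive
    .then(() => setTimeout(sendMsg, 5000));  // next run starts 5 seconds after this one ends
}

sendMsg();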
If you're looking to do it at specific 5 second intervals, but only when the method is not running, simply add a tracking variable to the equation, something like:
let doingWork = false;

function sendMsg() {
  if (!doingWork) {
    doingWork = true;
    // do the work
    doingWork = false;
  }
}

setInterval(sendMsg, 5000); // call sendMsg every 5 seconds
You could replace setInterval with PM2 cron call on the script, but the variable idea remains the same.
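Note that the flag as written only guards synchronous work; if the work is asynchronous, a sketch of the same idea clears the flag only when the work completes (doWork() is again a hypothetical promise-returning placeholder):

let doingWork = false;

function sendMsg() {
  if (doingWork) return;                     // previous run still in flight: skip this tick
  doingWork = true;
  doWork()
    .catch((err) => console.error(err))
    .then(() => { doingWork = false; });     // release the guard once the work has finished
}

setInterval(sendMsg, 5000);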
To have exactly 5000 ms between the end of your actions:
var myAsyncLongAction = function(cb){
  // your long action here
  return cb();
};

var fn = function(){
  setTimeout(function(){
    // run the long action, then schedule the next cycle
    myAsyncLongAction(function(){
      console.log(new Date().getTime());
      setImmediate(fn);
    });
  }, 5000);
};

fn();
To have exactly 5000 ms between the start of your actions:
var myAsyncLongAction = function(cb){
  // your long action here (simulated with a 1 second delay)
  setTimeout(function(){
    return cb();
  }, 1000);
};

var fn = function(basedelay, delay){
  if(delay === undefined)
    delay = basedelay;
  setTimeout(function(){
    // run the long action and measure how long it took
    var start = new Date().getTime();
    myAsyncLongAction(function(){
      var end = new Date().getTime();
      var gap = end - start;
      var next = Math.max(basedelay - gap, 0);
      console.log("Action took " + gap + " ms, next action in: " + next + " ms");
      setImmediate(fn, basedelay, next);
    });
  }, delay);
};

fn(5000);

Disconnect event fire after 2 minutes

I am using node.js and socket.io to create a chat application.
socket.on('disconnect', function(data){
  console.log("disconnected");
});
This event is fired as soon as the client gets disconnected (tab closed or network problem).
I want the disconnect event to be fired 1 minute after the client disconnects. Is that possible? Is there any configuration for this in socket.io?
That capability is not built into socket.io. The disconnect event fires when the socket is disconnected. Period. If you want to trigger some activity 1 minute after a disconnect, you can build your own timer to do so.
socket.on('disconnect', function(data){
  setTimeout(function() {
    console.log("disconnected");
  }, 60 * 1000);
});
If you want to trigger your event only if the client does not reconnect within 1 minute, then you can code that also, but it is a little more involved as you have to save the timer for the setTimeout() and then cancel it if that specific client reconnects before the timer fires.
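A minimal sketch of that idea, assuming io is your socket.io server instance and that you can identify the user across connections (e.g. a hypothetical userId sent in the handshake query); the socket id itself changes on reconnect, so it cannot be used as the key:

// Hypothetical map of userId -> pending disconnect timer.
const pendingDisconnects = new Map();

io.on('connection', function (socket) {
  const userId = socket.handshake.query.userId; // assumed client-supplied identifier

  // If this user reconnected within the grace period, cancel the pending timer.
  if (pendingDisconnects.has(userId)) {
    clearTimeout(pendingDisconnects.get(userId));
    pendingDisconnects.delete(userId);
  }

  socket.on('disconnect', function () {
    // Only treat the user as gone if they have not come back within 1 minute.
    const timer = setTimeout(function () {
      pendingDisconnects.delete(userId);
      console.log("user " + userId + " really disconnected");
    }, 60 * 1000);
    pendingDisconnects.set(userId, timer);
  });
});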
Based on what you put in your answer, here's what I think is an improved version:
(function() {
  var origClose = socket.onclose;
  socket.onclose = function(reason){
    var self = this;
    var args = Array.prototype.slice.call(arguments);
    /* Delay of 1 minute before removing from all rooms and disconnecting the id */
    setTimeout(function() {
      origClose.apply(self, args);
    }, 60 * 1000);
  };
})();
You have some parameters to control this behavior server-side: see the close timeout and heartbeat timeout settings; if you're using v1.0 you'll need to set the equivalent options in the new configuration style.
Note that these don't technically do what you're asking - they simply lengthen the amount of time a client has to reconnect.
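For reference, a hedged sketch of what those settings look like (assuming socket.io 0.9.x for the io.set style and 1.x for the constructor options; httpServer and the values shown are only illustrative):

// socket.io 0.9.x: tweak the timeouts on an existing server instance `io`
io.set('close timeout', 60);      // seconds a disconnected client has to reconnect
io.set('heartbeat timeout', 60);  // seconds without a heartbeat before the socket is closed

// socket.io 1.x: the equivalent knobs are passed as options when creating the server
var io1x = require('socket.io')(httpServer, {
  pingTimeout: 60000,   // ms to wait for a pong before the connection is considered closed
  pingInterval: 25000   // ms between pings
});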
socket.onclose is fired immediately after disconnection, before the disconnect event.
The disconnect event can be delayed by changing it as follows.
socket.onclose = function(reason){
  console.log(socket.adapter.sids[socket.id]);
  var self = this;
  /* Delay of 1 minute before removing the socket from all rooms and disconnecting the id.
     The body below mirrors what socket.io's own Socket#onclose normally does immediately. */
  setTimeout(function() {
    if (!self.connected) return self;
    console.log('closing socket - reason ' + reason);
    self.leaveAll();
    self.nsp.remove(self);
    self.client.remove(self);
    self.connected = false;
    self.disconnected = true;
    delete self.nsp.connected[self.id];
    self.emit('disconnect', reason);
  }, 60 * 1000);
};

nodejs is asynchronous/synchronous process, nonblocking/blocking IO

What is an asynchronous process in Node.js? Take a look at my test.
+ Person.find(): query on the database, it will take time
+ while(): delay for 5s
And the result in the console comes in this order:
the first: Listening on port 3000
the second: Wait
----third: 0
---------: 4
---------: 2
---------: 2
---------: 3
---------: 1
---------: 1
If this is an asynchronous process, why does the program stop at the while() loop for 5s before running console.log('0') and console.log('4')?
var Product = mongoose.model('Product', ProductSchema);
var myObject = new Object();

Person.find().exec(function (err, docs) {
  for (var i = 0; i < docs.length; i++) {
    Product.find({ user: docs[i]._id }, function (err, pers) {
      myObject[i] = pers;
      console.log('1');
    });
    console.log('2');
  }
  console.log('3');
});

console.log('Listening on port 3000');
var startTime = new Date().getTime();
console.log('wait');
while (new Date().getTime() < startTime + 5000); // the program stops here for 5s before printing '0' and '4'
console.log('0');
app.listen(3000);
console.log('4');
The reason you're getting this order of execution is that the only asynchronous functions you're showing are Person.find().exec() and Product.find().
The order you're seeing is this:
1. Your variable instantiation is synchronous.
2. Person.find().exec() is asynchronous and doesn't block the main thread; its callback is queued for later.
3. The main thread isn't blocked, so console.log('Listening on port 3000') runs.
4. var startTime is set synchronously.
5. console.log('wait'); runs synchronously and execution continues.
6. The while() loop runs. It blocks the main thread for 5 seconds.
7. When the loop finishes, console.log('0'); runs.
8. app.listen() and console.log('4') both run synchronously.
9. The Person.find().exec() callback finally runs and starts the for loop.
10. The for loop also blocks; all iterations complete before continuing, printing '2' each time and queuing a Product.find() callback.
11. Once the loop stops blocking the main thread, console.log('3') runs.
12. The callbacks of the asynchronous Product.find() calls within the loop execute, printing '1'.
In summary, your program stops at the while() loop because the loop is blocking. If you want to delay execution of code, do so without blocking the main thread by using one of the global timer functions:
setTimeout(function() {
  console.log('0');
  app.listen(3000);
  console.log('4');
}, 5000);
Node works in a single process, with the exception of being able to hand off to native code for some asynchronous processing. These are usually I/O calls like network and DB accesses.
Your while (new Date().getTime() < startTime + 5000); will block. However, look at your Product.find() call: you pass it a callback function. Code that works off callback functions is asynchronous and will let you know when that I/O has completed.

Node.js long poll logic help!

I'm trying to implement a long polling strategy with node.js.
What I want is: when a request is made to node.js, it will wait a maximum of 30 seconds for some data to become available. If there is data, it will output it and exit; if there is no data, it will just wait out the 30 seconds max and then exit.
Here is the basic code logic I came up with:
var http = require('http');

var poll_function = function(req, res, counter) {
  if (counter > 30) {
    // no data arrived within 30 seconds: time out the request
    res.writeHead(200, {'Content-Type': 'text/html;charset=utf8'});
    res.end('Output after 30 seconds!');
    return; // stop polling for this request
  }

  var rand = Math.random();
  if (rand > 0.85) {
    // simulated "data became available"
    res.writeHead(200, {'Content-Type': 'text/html;charset=utf8'});
    res.end('Output done because rand: ' + rand + '! in counter: ' + counter);
    return; // stop polling for this request
  }

  // no data yet: check again in 1 second
  setTimeout(function() {
    poll_function(req, res, counter + 1);
  }, 1000);
};

http.createServer(function(req, res) {
  poll_function(req, res, 1);
}).listen(8088);
http.createServer
(
function(req,res)
{
poll_function(req,res,1);
}
).listen(8088);
What I figure is: when a request is made, poll_function is called, which calls itself after 1 second via a setTimeout within itself. So it should remain asynchronous, meaning it will not block other requests and will provide its output when it's done.
I have used Math.random() here to simulate data becoming available at various intervals.
Now, what concerns me is:
1) Will there be any problem with it? I simply don't wish to deploy it without being sure it will not strike back!
2) Is it efficient? If not, any suggestions on how I can improve it?
Thanks,
Anjan
All nodejs code is non-blocking as long as you don't get stuck in a tight CPU loop (like while(true)) or use a library that has blocking I/O. Putting a setTimeout at the end of a function doesn't make it any more parallel, it just defers some CPU work till a later event.
Here is a simple demo chat server that randomly emits "Hello World" every 0 to 60 seconds to any and all connected clients.
// A simple chat server using long-poll and timeout
var Http = require('http');

// Array of open callbacks listening for a result
var listeners = [];

Http.createServer(function (req, res) {
  function onData(data) {
    clearTimeout(timeout); // cancel the timeout once data has been delivered
    res.end(data);
  }
  listeners.push(onData);

  // Set a timeout of 30 seconds
  var timeout = setTimeout(function () {
    // Remove our callback from the listeners array
    listeners.splice(listeners.indexOf(onData), 1);
    res.end("Timeout!");
  }, 30000);
}).listen(8080);
console.log("Server listening on 8080");

function emitEvent(data) {
  for (var i = 0, l = listeners.length; i < l; i++) {
    listeners[i](data);
  }
  listeners.length = 0;
}

// Simulate random events
function randomEvents() {
  emitEvent("Hello World");
  setTimeout(randomEvents, Math.random() * 60000);
}
setTimeout(randomEvents, Math.random() * 60000);
This will be quite fast. The only dangerous part is the splice: splice can be slow if the array gets very large. This could be made more efficient by closing all the handlers at once every 30 seconds (or 30 seconds after the last event) instead of closing each connection 30 seconds from when it started. But again, this is unlikely to be the bottleneck, since each of those array items is backed by a real client connection that is probably more expensive.
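For reference, a rough sketch of that alternative (one shared sweep instead of a per-request setTimeout); this is my reading of the suggestion, not code from the original answer:

var Http = require('http');

// Every open response waits here until data or the next sweep arrives.
var listeners = [];

Http.createServer(function (req, res) {
  listeners.push(function (data) {
    res.end(data);
  });
}).listen(8080);

// Deliver data to every waiting client and clear the list in one go.
function emitEvent(data) {
  var waiting = listeners;
  listeners = [];
  for (var i = 0, l = waiting.length; i < l; i++) {
    waiting[i](data);
  }
}

// One shared timer: anyone still waiting is flushed at most ~30 seconds
// after the previous sweep, so no indexOf/splice bookkeeping is needed.
setInterval(function () {
  emitEvent("Timeout!");
}, 30000);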
