node.js recursive setTimeout() inside of a pseudoclass - node.js

I think I'm having a difficult time wrapping my head around the scope of the following code:
// NOTE(review): this is the broken version being asked about — kept byte-for-byte.
var EventEmitter = require('events').EventEmitter, util = require('util');
// Here is the Ticker constructor:
var Ticker = function(time) {
this.time = time;
this.tick = function() {
this.emit('tick');
// BUG: `this.tick()` CALLS tick immediately and hands its return value
// (undefined) to setTimeout, so tick recurses synchronously without limit.
// A function REFERENCE is needed here instead, e.g. this.tick.bind(this).
setTimeout(this.tick(), this.time);
};
}
util.inherits(Ticker, EventEmitter);
var newTicker = new Ticker(1000);
newTicker.on('tick', function() { console.log("TICK"); });
newTicker.tick();
What ends up happening, is that the "tick" gets called many, many times filling up the stack without the setTimeout actually setting the timeout to one second.
Could any explain to me what's going on?
EDIT: I looked a little bit more, and I still couldn't figure out the issue of scope, really.
What I ended up doing was following the solution that was given for the problem, which I tried to solve by way of setTimeout().
The solution given was:
var EventEmitter = require('events').EventEmitter;
var util = require('util');

// Ticker: emits a 'tick' event every `time` milliseconds, forever.
var Ticker = function(time) {
  // Capture the instance so the interval callback can reach it; inside the
  // plain function below, `this` would not be the Ticker.
  var that = this;
  this.time = time;
  setInterval(function() {
    that.emit('tick');
  }, that.time);
};
util.inherits(Ticker, EventEmitter);

var ticker = new Ticker(1000);
ticker.on('tick', function() { console.log("TICK"); });
It does make more sense to me... What I'm wondering now is: Do inner functions and objects in javascript not keep track of their parent's scope and variables?

Your initial problem is that you were calling this.tick inside your setTimeout:
setTimeout(this.tick(), this.time);
Instead, you want to pass a reference to the this.tick function:
setTimeout(this.tick, this.time);
This fixes the problem for the first loop, but the second call to tick (which comes from the setTimeout) is not bound to the correct scope; so, you can bind the function reference to the current value of this using bind:
setTimeout(this.tick.bind(this), this.time);
(You can also use the self trick if you prefer.)
So, the full working program is:
var EventEmitter = require('events').EventEmitter;
var util = require('util');

// Ticker: each call to tick() emits 'tick' and re-schedules itself after
// `time` milliseconds.
var Ticker = function(time) {
  this.time = time;
  this.tick = function() {
    this.emit('tick');
    // bind(this) keeps the instance as `this` when the timer fires the
    // callback (a bare `this.tick` reference would lose it).
    setTimeout(this.tick.bind(this), this.time);
  };
};
util.inherits(Ticker, EventEmitter);

var newTicker = new Ticker(1000);
newTicker.on('tick', function() { console.log("TICK"); });
newTicker.tick();

Related

Node setInterval to modify global variable and access it from other module

I need to check every 4 seconds if an ip is alive.
I'm using a global variable global.baseip (I know it's bad practice, I am only trying it out with setInterval)
// NOTE(review): question code, kept byte-for-byte; see review remarks below.
global.baseip = "http://10.***.**.**:8048/TESTDEV02/ODataV4/Company('****')";
setInterval(function(){
getip();
// NOTE(review): `console(...)` is not callable — presumably `console.log(...)`
// in the real code; confirm against the original source.
console(baseip); //this gives the correct value
}, 4000);
// Probes each host:port pair and rewrites the global baseip on connect/error.
var getip = function() {
var hosts = [["10.***.**.**", 8048]];
hosts.forEach(function (item) {
// NOTE(review): `net` is never required in this snippet — assumed to be
// require('net') elsewhere in the file.
var sock = new net.Socket();
sock.setTimeout(2500);
sock
.on("connect", function () {
baseip =
"http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
sock.destroy();
})
.on("error", function (e) {
baseip =
"http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
})
// item = [host, port], and connect takes (port, host)
.connect(item[1], item[0]);
})
}
Then, I use baseip in other modules/files but it's always undefined.
Any help would be appreciated
Don't use a global. Though you may get it to work as you have discovered it is not easy to get it to work and is easy to forget something that causes your code to not work. It is also messing with node's internals and is deliberately working against the way node.js was designed. Node.js was explicitly designed to prevent people from using global variables. Therefore if you want to use global variables you need to somehow work around those protections.
Instead a simpler and more stable way to do this is to use a module.
One thing people don't realise about Commonjs modules is that they are singletons. This is a consequence of the module caching behavior. What this means is that if you import a module into 10 different files then you are not creating ten different objects that represent your module but all 10 files share a single object (the definition of the Singleton design pattern). Note that this works not only in node.js but also other Commonjs based systems like Webpack, React.js (jsx) and Typescript.
The code is really simple. So simple in fact that it makes no sense not to use this method because trying to circumvent the module system is far more complicated:
// shared.js
// Singleton settings object: the module cache guarantees that every
// require('./shared') in the process returns this same instance.
let shared_object = {
  baseip: "http://10.***.**.**:8048/TESTDEV02/ODataV4/Company('****')"
};
// BUG FIX: the variable was declared as `shard_object` but exported as
// `shared_object`, which threw a ReferenceError the moment the module loaded.
module.exports = shared_object;
Note: You can of course write the above much simpler or much more complicated. I wrote it the way I did for clarity
Then you can share the module above with your other modules:
// getip.js
// FIX: require('net') was missing — net.Socket is used below, so the module
// would have thrown "net is not defined" as soon as getip() ran.
const net = require('net');
const shared = require('./shared');

// Probes each host:port pair; on either connect or error, records the
// endpoint URL on the shared singleton so other modules see the update.
var getip = function() {
  var hosts = [["10.***.**.**", 8048]];
  hosts.forEach(function (item) {
    var sock = new net.Socket();
    sock.setTimeout(2500);
    sock
      .on("connect", function () {
        shared.baseip =
          "http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
        sock.destroy();
      })
      .on("error", function (e) {
        shared.baseip =
          "http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
      })
      // item = [host, port], and connect takes (port, host)
      .connect(item[1], item[0]);
  });
};
module.exports = getip;
and it works as expected:
// main.js
const getip = require('./getip');
const shared = require('./shared');
setInterval(function(){
  getip();
  // BUG FIX: `console` itself is not callable — it must be console.log.
  console.log(shared.baseip); // this gives the correct value
}, 4000);
Because you define another variable with the same name inside the scope, you are in reality making changes to that variable.
Just get rid of it, so you can work with the real baseip.
// Probe each host; on either outcome write the endpoint into the GLOBAL
// baseip. The shadowing `var baseip;` declaration has been removed so the
// assignments below actually reach the global binding.
var getip = function() {
  var hosts = [["10.***.**.**", 8048]];
  hosts.forEach(function (host) {
    var socket = new net.Socket();
    socket.setTimeout(2500);
    socket
      .on("connect", function () {
        baseip = "http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
        socket.destroy();
      })
      .on("error", function (e) {
        baseip = "http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
      })
      .connect(host[1], host[0]); // host = [address, port]
  });
};

In node.js, why does `requiring` an IIFE create a single closure in the global space?

I have a JavaScript module that looks something like this:
const Tools = {};
// IIFE: this function body runs exactly ONCE, at require time; logPath lives
// in its closure and is therefore shared by every file that imports Tools.log.
Tools.log = (function () { // FIX: the snippet was missing the parameter list `()`
  var logPath = '';
  // ... and all the stuff that is returned below
  return {
    setLogPath: setLogPath,
    trace: trace,
    debug: debug,
    info: info,
    warn: warn,
    error: error
  };
})();
module.exports = Tools;
I require this module in two files using:
var log = require(path/to/module).log;
I would have expected each require statement to create a separate closure, but I find that if I call the setLogPath method in one file, then the log path in the other file changes as well.
This is actually good for me. But I don't understand why only a single closure is being created.
I was expecting the Tools.log function to be run every time the require statement was used, creating separate closures of the Tools.log function within the files where the require statement is used. This is obviously not the case. In these files, the const Tools = require(path/to/Tools/).log is in the global namespace if that makes a difference.
How does the require statement work?
This code shows what I was expecting:
// Factory: every call to A() creates a FRESH closure with its own closureVal,
// so x and y below hold fully independent state.
const A = function() {
  let closureVal = 'a';
  const changeVal = function(val) {
    closureVal = val;
  };
  const printVal = function() {
    console.log(closureVal);
  };
  return { changeVal: changeVal, printVal: printVal };
};
var x = A();
x.printVal(); // a
x.changeVal('x');
x.printVal(); // x
var y = A();
y.printVal(); // a
y.changeVal('y');
y.printVal(); // y
x.printVal(); // x
Actually if I change that test code above to use an IIFE like this:
// IIFE: the function body runs exactly ONCE, so A is a single shared object —
// x and y below are merely two references to the same closure state.
const A = (function() {
  let closureVal = 'a';
  return {
    changeVal: function(val) {
      closureVal = val;
    },
    printVal: function() {
      console.log(closureVal);
    }
  };
})();
var x = A;
x.printVal(); // a
x.changeVal('x');
x.printVal(); // x
var y = A;
y.printVal(); // x
y.changeVal('y');
y.printVal(); // y
x.printVal(); // y
Then I see the same thing as with the require statement. I would expect this because in this test the A function is only being run once, with vars x,y then assigned to the resultant object as pointers.
But I still don't see the why the same is happening with the require statement, where I would expect the function to be run every time the module is loaded

declaring variable with same name causes problems in socket io

It took me a while to figure out what the problem was, but I'm wondering why it's acting like that.
Using this code, the variables player, players and socket will be undefined, causing errors.
// NOTE(review): broken version discussed in the question — kept byte-for-byte.
var player = Player(socket.id, socket);
socket.on('joinHost', function(data) {
var gameID = data;
// BUG: the `var player` inside the loop below is hoisted to the top of this
// whole callback, so `player` here refers to the (still-undefined) local,
// not the outer binding — hence "cannot set property of undefined".
player.gameID=gameID;
var game = GAME_LIST[gameID];
game.players[socket.id]=player;
var players = game.players;
for (var p in players){
// These `var`s shadow the outer `player`/`socket` for the WHOLE callback
// (var is function-scoped, not block-scoped).
var player = players[p];
var socket = player.socket;
socket.emit('playerJoined');
}
});
Avoiding the declarations of variables with same names makes it all work correctly.
var player = Player(socket.id, socket);
socket.on('joinHost', function(data) {
  // Distinct local names avoid shadowing the outer `player`/`socket` bindings.
  var gameID = data;
  player.gameID = gameID;
  var game = GAME_LIST[gameID];
  game.players[socket.id] = player;
  var roster = game.players;
  for (var key in roster) {
    var member = roster[key];
    var memberSocket = member.socket;
    memberSocket.emit('playerJoined');
  }
});
The interesting part is, when I ran the first code, it says the player in the line player.gameID=gameID is undefined, while if I removed the code which is after player.gameID=gameID, the player was defined. Basically, the code after player.gameID=gameID caused the player to be undefined.
So, why is this happening?
When you declare var player = players[p]; it is declared for the whole function scope (the for loop doesn't have a scope of its own).
The names in the current scope are evaluated all in the beginning, before executing the function body.
So when function(data) is called, the name player is overridden in that scope even before var gameID = data; is executed.
A minimal example:
> var x = 'foo';
> f = function() { console.log(x); var x = 'bar'; }
> f()
undefined
JavaScript moves variable declarations to the top of the scope in which they were defined and gives them an undefined initial value, but keeps the assignments in place. This is called hoisting.
Your code is equivalent to :
// Hoisting makes the callback behave as if every `var` were declared at the
// top with an undefined value, with only the assignments left in place:
var player = Player(socket.id, socket);
socket.on('joinHost', function(data) {
var gameID; // undefined
var game; // undefined
var players; // undefined
var player; // undefined
var socket; // undefined
gameID = data;
player.gameID=gameID; // can't set property 'gameID' of undefined
game = GAME_LIST[gameID];
game.players[socket.id]=player; // is undefined since 'player' is undefined at this stage
players = game.players; // undefined
for (var p in players){
player = players[p];
socket = player.socket;
socket.emit('playerJoined');
}
});

How to manipulate the result set of more than one function using node js

I have node js files restservice.js and mysql.js
In mysql.js i have two functions as elementlevelpricing and pricingdetail
In restservice.js i have api which has the code as :
// Builds a response skeleton from the request body, then walks
// workload.elements one at a time and replies once all have been priced.
var workload = req.body;
var workloadinfo = {
workloadId: workload.workloadId,
ownerId: workload.ownerId,
uniqueName: workload.uniqueName,
name: workload.name
}
if(workload.elements && workload.elements.length > 0)
{
var elementlevelpricingSummary = {};
var elementArray = [];
var elementinfo = {};
var metadataModified = {};
var pricingDetail = {};
// Runs createResponse for each element IN ORDER; the final callback sends
// the accumulated workloadinfo only after the last element finishes.
async.forEachSeries(workload.elements, createResponse, function (err) {
res.send(workloadinfo);
});
// Per-element worker: must invoke callback() exactly once to advance the series.
function createResponse(elements,callback) {
var resourceIdentifierArray = [];
elementinfo = elements;
resourceIdentifierArray.push(elements.uri);
var resourceIdentifiers = resourceIdentifierArray.join(',');
// Get element level pricing summary
mysql.elementlevelpricing(resourceIdentifiers, function(result){
// do some stuff here
return callback();
});
};
};
I need to call the function pricingdetail in mysql.js and append the result to the global variable workloadinfo (which should already have the result set of elementlevelpricing — that is what is sent within forEachSeries). Can anyone suggest a professional way to accomplish this?
Use asynchronous functions. The whole point of Node.js is to avoid blocking. Blocking in Node.js is worse than blocking in threaded environments, because there aren't any other threads (though there may be other clustered processes). You're blocking the only event loop available. That means that your whole server has to wait, doing absolutely no work until your I/O is done.

http.Clientrequest.abort() ends program

I'm having some issues using Node.js as a http client against an existing long polling server. I'm using 'http' and 'events' as requires.
I've created a wrapper object that contains the logic for handling the http.clientrequest. Here's a simplified version of the code. It works exactly as expected. When I call EndMe it aborts the request as anticipated.
var http = require('http');
var events = require('events');

// lpTest: long-polls one URL; emits 'res_complete' with the full body of
// each completed response.
function lpTest(urlHost, urlPath) {
  this.options = {
    host: urlHost,
    port: 80,
    path: urlPath,
    method: 'GET'
  };
  // (the original `var req = {};` local was unused and has been removed;
  // the live request is stored on this.req by getData)
  events.EventEmitter.call(this);
}
lpTest.super_ = events.EventEmitter;
lpTest.prototype = Object.create(events.EventEmitter.prototype, {
  constructor: {
    value: lpTest,
    enumerable: false
  }
});

// Issues one GET; accumulates the body and emits 'res_complete' on 'end'.
lpTest.prototype.getData = function getData() {
  // Inside the response callbacks `this` is NOT the lpTest instance,
  // so capture it here.
  var self = this;
  this.req = http.request(this.options, function (res) {
    var httpData = "";
    res.on('data', function (chunk) {
      httpData += chunk;
    });
    // FIX: the original snippet was missing the closing `})`s here (a syntax
    // error) and emitted on the wrong `this`.
    res.on('end', function () {
      self.emit('res_complete', httpData);
    });
  });
  // FIX: an http.request is only actually sent once end() is called.
  this.req.end();
};

// Aborts the in-flight request, if any.
lpTest.prototype.EndMe = function EndMe() {
  this.req.abort();
};

module.exports = lpTest;
Now I want to create a bunch of these objects and use them to long poll a bunch of URL's. So I create an object to contain them all, generate each object individually, initiate it, then store it in my containing object. This works a treat, all of the stored long-polling objects fire events and return the data as expected.
var lpObject = require('./lpTest.js');
var objWatchers = {};

// Creates a watcher for one URL and stores it keyed by url+path.
function DoSomething(hostURL, hostPath){
  var tempLP = new lpObject(hostURL, hostPath);
  tempLP.on('res_complete', function(httpData){
    console.log(httpData);
    this.getData(); // re-issue the long poll as soon as one completes
  });
  // FIX: was `hosturl` (undeclared) — JavaScript identifiers are case-sensitive.
  objWatchers[hostURL + hostPath] = tempLP;
}
DoSomething('firsturl.com','firstpath');
DoSomething('secondurl.com','secondpath'); // FIX: the closing quote was missing
objWatchers['firsturl.com' + 'firstpath'].getData();
objWatchers['secondurl.com' + 'secondpath'].getData();
Now here's where it fails... I want to be able to stop a long-polling object while leaving the rest going. So naturally I try adding:
objWatchers['firsturl.com' + 'firstpath'].EndMe();
But this causes the entire node execution to cease and return me to the command line. All of the remaining long-polling objects, that are happily doing what they're supposed to do, suddenly stop.
Any ideas?
Could it have something to do with the fact that you are only calling getData() when the data is being returned?
Fixed code:
// Creates a watcher, starts polling immediately, and stores it by url+path.
function DoSomething(hostURL, hostPath){
  var tempLP = new lpObject(hostURL, hostPath);
  tempLP.on('res_complete', function(httpData){
    console.log(httpData);
  });
  tempLP.getData();
  // FIX: was `hosturl`, which is undeclared (identifiers are case-sensitive).
  objWatchers[hostURL + hostPath] = tempLP;
}
I have seemingly solved this, although I'm not entirely happy with how it works:
// Workaround: defer EndMe() so the abort happens on a later event-loop tick
// rather than during the current synchronous execution.
var timeout = setTimeout(function(){
objWatchers['firsturl.com' + 'firstpath'].EndMe();
}, 100);
By calling the closing function on the object after a delay I seem to be able to preserve the program execution. Not exactly ideal, but I'll take it! If anyone can offer a better method please feel free to let me know :)

Resources