Node setInterval to modify global variable and access it from other module - node.js

I need to check every 4 seconds if an ip is alive.
I'm using a global variable global.baseip (I know it's bad practice, I am only trying it out with setInterval)
// Seed the global with a default endpoint, then re-probe every 4 seconds.
global.baseip = "http://10.***.**.**:8048/TESTDEV02/ODataV4/Company('****')";
setInterval(function(){
  getip();
  console.log(baseip); // this gives the correct value (fixed: `console(...)` is not callable)
}, 4000);
// Probe each host:port pair with a short-lived TCP socket; both the
// success and the failure handler overwrite the global `baseip`.
var getip = function() {
  var hosts = [["10.***.**.**", 8048]];
  hosts.forEach(function (item) {
    var sock = new net.Socket();
    sock.setTimeout(2500); // give up on unresponsive hosts after 2.5s
    sock
      .on("connect", function () {
        // Host is reachable: record its base URL and free the socket.
        baseip =
          "http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
        sock.destroy();
      })
      .on("error", function (e) {
        // Host is unreachable: the question writes the same URL here too.
        baseip =
          "http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
      })
      .connect(item[1], item[0]);
  })
}
Then, I use baseip in other modules/files but it's always undefined.
Any help would be appreciated

Don't use a global. Though you may get it to work, as you have discovered it is not easy to get it to work, and it is easy to forget something that causes your code not to work. It is also messing with node's internals and is deliberately working against the way node.js was designed. Node.js was explicitly designed to prevent people from using global variables. Therefore, if you want to use global variables, you need to somehow work around those protections.
Instead a simpler and more stable way to do this is to use a module.
One thing people don't realise about CommonJS modules is that they are singletons. This is a consequence of the module caching behavior. What this means is that if you import a module into 10 different files, you are not creating ten different objects that represent your module — all 10 files share a single object (the definition of the Singleton design pattern). Note that this works not only in node.js but also in other CommonJS-based systems like Webpack, React.js (jsx) and Typescript.
The code is really simple. So simple in fact that it makes no sense not to use this method because trying to circumvent the module system is far more complicated:
// shared.js
// Shared-state module: CommonJS caches this module, so every file that
// does require('./shared') receives this same single object (a singleton).
// Fixed: the variable was declared `shard_object` but exported as
// `shared_object`, which threw a ReferenceError at load time.
let shared_object = {
  baseip: "http://10.***.**.**:8048/TESTDEV02/ODataV4/Company('****')"
}
module.exports = shared_object;
Note: You can of course write the above much simpler or much more complicated. I wrote it the way I did for clarity
Then you can share the module above with your other modules:
// getip.js
const shared = require('./shared');
// Probe every configured host; whatever the outcome, write the base URL
// onto the shared singleton so all modules requiring './shared' see it.
var getip = function () {
  var hosts = [["10.***.**.**", 8048]];
  hosts.forEach(function (entry) {
    var probe = new net.Socket();
    probe.setTimeout(2500);
    probe
      .on("connect", function () {
        // Reachable: publish the URL and release the socket.
        shared.baseip =
          "http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
        probe.destroy();
      })
      .on("error", function (err) {
        // Unreachable: the original snippet publishes the same URL here.
        shared.baseip =
          "http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
      })
      .connect(entry[1], entry[0]);
  });
};
module.exports = getip;
and it works as expected:
// main.js
// Consumer side: both modules are required; reading shared.baseip after
// getip() runs observes the update made through the shared singleton.
const getip = require('./getip');
const shared = require('./shared');
setInterval(function(){
  getip();
  console.log(shared.baseip); // this gives the correct value (fixed: `console(...)` is not callable)
}, 4000);

Because you declare another variable with the same name inside the function's scope, your assignments only change that local variable — it shadows the global one.
Just get rid of it, so you can work with the real baseip.
// The asker's function with the shadowing declaration commented out:
// assignments to `baseip` now reach the outer (global) variable.
var getip = function() {
//var baseip; //This variable is overshadowing your global variable
var hosts = [["10.***.**.**", 8048]];
hosts.forEach(function (item) {
var sock = new net.Socket();
sock.setTimeout(2500);
sock
.on("connect", function () {
// Host answered: record the reachable base URL, then free the socket.
baseip =
"http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
sock.destroy();
})
.on("error", function (e) {
// Host unreachable: the same URL is written here too (as in the question).
baseip =
"http://'10.***.**.**':8048/TESTDEV02/ODataV4/Company('****')";
})
.connect(item[1], item[0]);
})
}

Related

How to deal with re-usable connection objects in functional programming (in Node.js)?

How does one deal with persistent objects like socket connections in functional programming?
We have several functions like this
// Original shape being critiqued: every call pays for connection setup and
// teardown, and creating the connection is conflated with using it.
function doSomething(argument1, argument2) {
const connection = createConnection(argument1); // connection across the network
const result = connection.doSomething();
connection.close()
return result;
}
Each one recreating the connection object, which is a fairly expensive operation. How could one persist a connection like that in functional programming? Currently, we simply made the connection global.
Your program is going to have state. Always. Your program is going to do some I/O. Pretty much always. Functional programming is not about not doing those things, it's about controlling them: doing them in a way where the things that tend to complicate code maintenance and make code harder to reason about are reasonably confined.
As for your particular function, I would argue that it has a problem: you've conflated creating a connection with doing something with that connection.
You probably want to start with something more like this:
// Separate the two concerns: obtaining a connection vs. using one.
const createConn = (arg) => createConnection(arg);
// The connection is now a parameter, so tests can inject a mock.
const doSomething = (conn, arg) => conn.doSomething(arg);
Note that this is easier to test: you can pass a mock in a unit test in a way that you can't with your original. An even better approach would be to have a cache:
// Memoize connections per argument object: the first call for a given `arg`
// creates the connection; later calls return the cached one, making
// getConn idempotent. WeakMap lets unused keys be garbage-collected.
const cache = new WeakMap();
const getConn = (arg) => {
  const cached = cache.get(arg);
  if (cached) {
    return cached;
  }
  const conn = createConnection(arg);
  cache.set(arg, conn);
  return conn;
};
Now your getConn function is idempotent. And a better approach still would be to have a connection pool:
// A minimal lazy connection pool. NOTE: createPool returns the *acquire
// function* itself, so callers obtain a connection with `await pool()`
// and hand it back via `conn.release()`.
const inUse = Symbol(); // private per-connection "checked out" flag
const createPool = (arg, max=4) => {
// stateful, captured in closure, but crucially
// this is *opaque to the caller*
const conns = [];
return async () => {
// check the pool for an available connection
const available = conns.find(x => !x[inUse]);
if (available) {
available[inUse] = true;
return available;
}
// lazily populate the cache
if (conns.length < max) {
const conn = createConn(arg); // `createConn` from the earlier snippet
conn.release = function() { this[inUse] = false };
conn[inUse] = true;
conns.push(conn);
return conn;
}
// If we don't have an available connection to hand
// out now, return a Promise of one and
// poll 4 times a second to check for
// one. Note this is subject to starvation even
// though we're single-threaded, i.e. this isn't
// a production-ready implementation so don't
// copy-pasta.
return new Promise(resolve => {
const check = () => {
const available = conns.find(x => !x[inUse]);
if (available) {
available[inUse] = true;
resolve(available);
} else {
setTimeout(check, 250);
}
};
setTimeout(check, 250);
});
};
}
Now the details of the creation are abstracted away. Note that this is still stateful and messy, but now the consuming code can be more functional and easier to reason about:
// Consumer code: acquire from the pool, use the connection, release it.
// `createPool` returns the acquire *function* itself, so the pool is
// invoked directly — the original snippet's `pool.getConn()` would throw
// because no such method exists on the returned function.
const doSomething = async (pool, arg) => {
  const conn = await pool();
  conn.doSomething(arg);
  conn.release();
}
// Step 3: profit!
const pool = createPool(whatever);
const result = doSomething(pool, something);
As a final aside, when trying to be functional (especially in a language not built on that paradigm) there is only so much you can do with sockets. Or files. Or anything else from the outside world. So don't: don't try to make something inherently side-effective functional. Instead put a good API on it as an abstraction and properly separate your concerns so that the rest of your code can have all of the desirable properties of functional code.

How to avoid using global variables in Nodejs?

I have /bin/www that I execute and websocketServer.js that needs the httpServer object from /bin/www to initialize the websocket server. How can I pass httpServer to websocketServer.js without using a global variable?
websocketServer.js:
let WebSocket = require('ws')
let wss = new WebSocket.Server({ server: global.httpServer })
wss.on('connection', (connection, req) =>
...
/bin/www:
let app = require('../app')
let http = require('http')
...
global.httpServer = http.createServer(app)
httpServer.listen(port)
...
I have foo.js and bar.js that both handle user information that is only persistent at runtime (no need to store in database). How can these 2 files work with the same users? I can only think of using a shared variable aka global. Is there any way to avoid using a global variable in this situation? Should I even bother avoiding using global variables if it makes total sense to use them in my mind?
Besides, I came to this situation of needing runtime global variables because so far I've been using a database which is essentially a provider of global variables. Except now my project requires variables that need to be global but don't have to be stored in a database.
foo.js:
...
function getsCalledWhenUserLogsIn(username)
{
global.users[username] = { timer: new Timer(), websocketConnection: null, ... }
// initializing the user below
...
}
...
bar.js:
...
websocketServer.on('connection', (connection, req) =>
{
...
connection.on('message', (message) =>
{
let user = global.users[JSON.parse(message).username]
if (user)
{
user.websocketConnection = connection
...
}
...
}
...
}
...
This is not my code per se but at least it gives you some idea
I want to be able to structure my files in a way that I won't have to use global variables.
Example with global variables:
// Example WITH a global: both functions read the same ambient variable.
global.bar = 'hello world';
function foo1() {
console.log(bar);
}
function foo2() {
console.log(bar);
}
foo1();
foo2();
Example without global variables:
// Example WITHOUT globals: the value is passed in explicitly as an argument;
// the caller is the "third thing" that wires the two functions together.
var bar = 'hello world';
function foo1(bar) {
console.log(bar);
}
function foo2(bar) {
console.log(bar);
}
foo1(bar);
foo2(bar);
This is overly simplistic, but the main point is: if you want 2 distinct things to have access to the same object without being aware of each other, you need a 'third thing' to pass this object to the first two things.
Also see: dependency injection.
As for the question if you should always do this... I would argue that avoiding globals is something you should always strive for, unless you have a good reason not to. Best practices are a good default, but break them if common sense dictates that you shouldn't apply it for specific cases.
However, I don't think that you have a valid enough case for it here. You just have to learn functions and arguments a bit better.
To avoid global variables you could use XMLHttpRequest and just GET/POST to yourself. Not the most efficient thing but it would still work.
// Round-trip the value through the server instead of a process global.
var xhr2 = new XMLHttpRequest();
xhr2.open('GET', "http://yourserver", true);
xhr2.onreadystatechange = function() {
  // Check readyState first: status is only meaningful once the request is DONE (4).
  // Strict equality avoids accidental coercion.
  if (this.readyState === 4 && this.status === 200) {
    if (this.responseText === '') { return; }
    //your code here
  }//
};//end onreadystate
xhr2.send();
You could also have global arrays that have objects and just go through them. Then you have one array. It just depends how you want to go about it.

Can we reuse an exported function in NodeJS?

I have this in my .js file:
// Calling one exported function from another via the `exports` object.
// NOTE(review): the Promise from generate() is neither returned nor
// .catch()-ed here, so rejections would go unhandled — verify callers.
exports.generate = function(details) {
// bunch of code
// returns Promise
}
exports.save = function(details){
exports.generate(details)
.then(function(id){
//save in db
})
}
Is it okay to use an exported function like this? Or is there a better way..?
It depends on if you want consumers of the module to be able to influence the module's behavior by overwriting exports.generate (e.g. require('foo').generate = function() {...}).
If you don't want users to be able to influence it in this way, then your best bet is going to be pulling out the generate() function and naming it, then exporting that and using the function directly by name inside save():
// Name the function locally and export the reference: save() now calls the
// local binding directly, so a consumer overwriting require('foo').generate
// can no longer change save()'s behavior.
function generate(details) {
// ...
}
exports.generate = generate;
exports.save = function(details) {
generate(details).then(function(id) {
// ...
});
};
Otherwise if you do want to allow users to override the generate() functionality, then what you are currently doing is fine.
var _this=this;
exports.save = function(details) {
_this.generate(details) ...
};

Illegal invocation error using ES6 Proxy and node.js

I can not figure out why the following code does not work:
// Wrapping the module in a Proxy loses cpus()'s internal binding context:
var os = new Proxy(require('os'), {});
console.log( os.cpus() ); // TypeError: Illegal invocation
whereas
// Supplying `os` as the `this` context explicitly works:
var os = require('os');
console.log(Reflect.apply(os.cpus, os, []));
or
// platform() does not depend on its `this` context, so the proxy is harmless:
var os = new Proxy(require('os'), {});
console.log( os.platform() );
works as expected.
Having just skim read the source for the os package in the Node repo, it appears that the cpus() is exported from binding.getCPUs which is a C hook in the Node runtime environment.
cpus() therefore has the binding object as a function context, which is then lost through the proxy, giving you the IllegalInvocation error because there is no context to the function when you call it — although I'm hazy on the details.
platform() on the other hand is exported as function () { return process.platform; }, and hence it's just a function that returns an object, and doesn't need to be run under a specific context because Node function contexts will have the process variable specified by default (unless it has been overridden).
The following behaviour shows that applying the os as a context to the cpus function will work — proxies on function objects evidently lose the function context when calling properties.
// Demonstration: only property access *through the object proxy* loses the context.
const os = require('os');
const proxy = new Proxy(os, {}); // proxy of object, functions called get proxy context rather than os context
const cpus = new Proxy(os.cpus, {}); // proxy of function, still has os context
console.log(os.cpus()); // works (duh)
console.log(cpus()); // works
console.log(proxy.cpus.apply(os, [])); // works
console.log(proxy.cpus()); // fails with IllegalInvocation
Note: If someone can clear up the details on the JS function context for an answer I'd love to read it too.
How about composition:
// Composition: copy each function onto the proxy pre-bound to the real os,
// so the call context can no longer be lost.
const os = require('os');
const proxy = new Proxy(os, {});
Object.getOwnPropertyNames(os).forEach(k => {
var v = os[k];
if(typeof v === "function") proxy[k] = v.bind(os);
});
//the `!!` because I don't want the actual print
//only a `true` or an `Error`
console.log(!!os.cpus());
console.log(!!proxy.cpus());
console.log(!!proxy.cpus.apply(proxy, []));
and all this as a utility function to "replace" new Proxy(), where handler.bindTargetFunctions can be
either an array of keyNames to be bound (so you can be specific)
or any truthy or falsy value to determine wether all functions on the target should be bound
the code:
/**
 * Drop-in helper around `new Proxy()`.
 * handler.bindTargetFunctions may be:
 *   - an array of key names to bind to the target, or
 *   - any truthy value, meaning "bind every function property of target".
 * Bound copies are assigned through the proxy so calls keep the target context.
 */
function proxy(target, handler) {
  const wrapped = new Proxy(target, handler);
  const spec = handler.bindTargetFunctions;
  if (spec) {
    const keys = Array.isArray(spec)
      ? spec
      : Object.getOwnPropertyNames(target).filter(
          (key) => typeof target[key] === "function"
        );
    keys.forEach((key) => {
      wrapped[key] = target[key].bind(target);
    });
  }
  return wrapped;
}
// Usage: bind every exported function (or list specific keys to bind).
const os = proxy(require('os'), { bindTargetFunctions: true });
//or
//const os = proxy(require('os'), { bindTargetFunctions: ["cpus"] });
console.log(os.cpus());
Edit:
Currently I try to bind functions directly in my get handler (see github.com/FranckFreiburger/module-invalidate/blob/master/…)‌​, the drawback of my solution is that each access to a function returns a new binding.
I mentioned caching in the comments. This is how that cache could look:
// Proxy a module's exports, handing out each exported function bound to the
// exports object so callers cannot lose its context. Bound versions are
// cached per property and refreshed when the underlying export is replaced.
function createProxy(mod){
  var cache = Object.create(null); // dictionary: property -> { original, bound }
  return new Proxy(function(){}, {
    get(target, property, receiver) {
      var val = Reflect.get(mod._exports, property, receiver);
      if(typeof val === "function"){
        // (Re)bind only when there is no cached entry or the export changed.
        if(!(property in cache) || cache[property].original !== val){
          cache[property] = {
            original: val,
            bound: val.bind(mod._exports) // fixed: was `bal.bind(...)`, a typo that threw ReferenceError
          }
        }
        val = cache[property].bound;
      }else if(property in cache){
        // Export is no longer a function; drop the stale cache entry.
        delete cache[property];
      }
      return val;
    }
  });
}
And No, I don't consider this cache a regular object. Not because it inherits from null, but because logically, to me this is a dictionary/map. And I don't know any reason why you would ever extend or proxy a particular dictionary.

Passing a return from one function to another function that already has set parameters?

Edit: I know JS is asynchronous, I have looked over the How to Return thread. The issue I'm having is that going from "foo" examples to something specific = I'm not quite sure where to re-format this.
Also here is some context: https://github.com/sharkwheels/beanballs/blob/master/bean-to-osc-two.js
I have a question about returns in node. It might be a dumb question, but here goes. I have a function that connects to a socket, and gets OSC messages from processing:
// Question code: the array built here is *returned from the callback*, but
// dgram ignores callback return values — which is exactly why the asker
// cannot get `isAll` into the other function.
var sock = dgram.createSocket("udp4", function(msg, rinfo) {
try {
// get at all that info being sent out from Processing.
//console.log(osc.fromBuffer(msg));
var getMsg = osc.fromBuffer(msg);
var isMsg = getMsg.args[0].value;
var isName = getMsg.args[1].value;
var isAdd = getMsg.address;
var isType = getMsg.oscType;
// make an array out of it
var isAll = [];
isAll.push(isName);
isAll.push(isMsg);
isAll.push(isAdd);
isAll.push(isType);
// return the array
console.log(isAll);
return isAll; // NOTE(review): this return value is discarded by dgram
} catch (error) {
console.log(error);
}
});
Below I have the start of another function, to write some of that array to a BLE device. It needs name and characteristics from a different function. How do I get the below function to use isAll AND two existing parameters?
// Target function: needs `isAll` in addition to the two parameters supplied
// by the BLE setup code.
var writeToChars = function (name, characteristics) { // this is passing values from the BLE setup function
// i need to get isAll to here.
// eventually this will write some values from isAll into a scratch bank.
}
Thanks.
async call in this case be written something like this. state can be maintained in the variables in closure if required. In this particular case - you can do without any state (isAll) as well.
// Suggested shape: let the socket callback drive the next step directly
// instead of returning a value nobody receives.
var isAll;
var soc = dgram.createSocket('udp4', oncreatesocket);
function oncreatesocket(msg, rinfo)
{
isAll = parseMessage(msg);
writeData(isAll);
}
function parseMessage(msg) {
...
// code to parse msg and return isAll
}
function writeData() {}
if the writeData is small enough function. It can be inside oncreatesocket without impacting the readability of the code.
Alright. So I figured out what to do, at least in this scenario. I'm sure there is a better way to do this, but for now, this works.
I'm mapping an existing global array of peripherals into the write function, while passing the OSC message to it as a parameter. This solved my issue of "how do I get two pieces of information to the same place". It figures out which peripheral is which and writes a different value to each scratch bank of each peripheral accordingly. Leaving here for future reference.
// Writes the OSC message value carried in `passThrough` ({ name, msg }) to
// the scratch characteristic of the matching BLE peripheral in `beanArray`.
// Fixed: removed the no-op `var passThrough = passThrough;` self-assignment.
var writeToBean = function(passThrough){
  console.log("in Write to bean: ", passThrough);
  // Match each discovered peripheral by its advertised local name.
  _.map(beanArray, function(n){
    if(n.advertisement.localName === passThrough.name){
      //var name = n.advertisement.localName;
      n.discoverSomeServicesAndCharacteristics(['a495ff20c5b14b44b5121370f02d74de'], [scratchThr], function(error, services, characteristics){
        var service = services[0];
        var characteristic = characteristics[0];
        var toSend = passThrough.msg;
        console.log("service", service);
        console.log("characteristic", characteristic);
        if (toSend != null) {
          characteristic.write(new Buffer([toSend]), false, function(error) {
            if (error) { console.log(error); }
            console.log("wrote " + toSend + " to scratch bank 3");
          });
        }
        // not sure how to make the program resume, it stops here. No error, just stops processing.
      });
    }
  });
}

Resources