Check function name exists and call by String - node.js

I am trying to simplify and organize the socket communication in my Node.js project (the project is quite big).
Instead of calling socket.emit() directly on the client side, I use a function that I created:
function socketEmit(socketName, functionName, data) {
    socket.emit(socketName, { functionName: functionName, data: data });
}

socketEmit('exampleClass', 'exampleFunction', 'test');
socketEmit('exampleClass2', 'exampleFunction2', 'test');
After the emit, I receive the event on the server side (socket.on) and register it in the class that matches the "socketName":
socket.on('exampleClass', function (socketData) {
    var exampleClass = require('./server/exampleClass.js');
    exampleClass.registerSocket(socket, socketData);
});

socket.on('exampleClass2', function (socketData) {
    var exampleClass2 = require('./server/exampleClass2.js');
    exampleClass2.registerSocket(socket, socketData);
});
Each class then has a "registerSocket" function that is supposed to call the requested function:
exampleClass.js
module.exports = {
    registerSocket: function (socket, socketData) {
        var functionName = socketData.functionName;
        var data = socketData.data;
        [functionName](socket, data);
    },
};

function exampleFunction(socket, data) {
    console.log('test');
}
However, I get an error in this part of the registerSocket function:
[functionName] is not a function

You're trying to call an array, [functionName](socket, data):
const array = [functionName]; // array
array(socket, data); // you can't :)
Put the functions in an object, and access it using [] notation.
const fns = {
    exampleFunction(socket, data) {
        console.log('test');
    }
};

module.exports = {
    registerSocket: function (socket, socketData) {
        var functionName = socketData.functionName;
        var data = socketData.data;
        fns[functionName](socket, data);
    },
};
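One thing to watch for (not covered above): if the client sends a functionName that does not exist in fns, fns[functionName] will be undefined and calling it will throw. A minimal sketch of a guard inside registerSocket, assuming the same fns object:

    if (typeof fns[functionName] === 'function') {
        // the name maps to a real handler, safe to call
        fns[functionName](socket, data);
    } else {
        // hypothetical error handling - log and ignore unknown names
        console.warn('Unknown function name: ' + functionName);
    }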

Call function expression in external module

How do I call the function expression "extractUserProgress", which is defined in an external module, from server.js?
EDIT
I have clarified further what is happening in my code.
I have a chain of function expressions in my module that follow from "extractUserProgress". The last function returns an array which is what I'm after.
// setGen.js (module file)
module.exports = function(app, db) {
    var extractUserProgress = function() {
        // Access mongoDB and do stuff
        nextFunction(x);
    };

    var nextFunction = function(x) {
        let y = [];
        // calculate y
        return y; // this is what I'm after
    };
};
// server.js
const setGen = require("./setGen");

app.get("/setGen", function(req, res) {
    // data here from select input
    extractUserProgress; // How to call from here?
    console.log(y); // array from module
});
I have required the module in server.js, but I am not sure how to export the function in this scenario, where the functions in the module also need access to MongoDB.
Thanks
You can achieve this easily if you change the structure of your exports a little.
const extractUserProgress = function (app, db) {
    console.log('This can be called');
    // Access mongoDB and do stuff
};

module.exports = {
    extractUserProgress
};
You can call this function from the other side this way:
const newFile = require('./server');
newFile.extractUserProgress(); // you can pass arguments APP and DB to this function
With the code as-is, you can't - extractUserProgress is not accessible, it's declared inside the exported function scope.
If you need it accessible, and also need to keep the exported signature, then you can return a hash of functions e.g.
module.exports = function(app, db) {
    ...
    return {
        extractUserProgress(...) {
            ...
        },
        // More functions
    };
};

// Usage
const setGen = require('./setGen')(app, db);
setGen.extractUserProgress(...);
If you don't need to maintain the existing exported function, then you can export the functions as a hash instead
module.exports = {
    extractUserProgress(...) {
        ...
    },
    // More functions
};

// Usage
const setGen = require('./setGen');
setGen.extractUserProgress(...);
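For concreteness, here is a minimal sketch of the first option filled in; the collection name and query are placeholders, and a callback is added because the MongoDB access is asynchronous:

// setGen.js
module.exports = function (app, db) {
    var nextFunction = function (docs) {
        let y = [];
        // calculate y from the documents
        return y;
    };

    var extractUserProgress = function (done) {
        // hypothetical collection/query - adjust to your own schema
        db.collection('progress').find({}).toArray(function (err, docs) {
            if (err) return done(err);
            done(null, nextFunction(docs));
        });
    };

    return { extractUserProgress: extractUserProgress };
};

// server.js
const setGen = require('./setGen')(app, db);

app.get('/setGen', function (req, res) {
    setGen.extractUserProgress(function (err, y) {
        if (err) return res.status(500).send(err.message);
        res.json(y); // the array from the module
    });
});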

Meteor.js async call is not returning the result, keeps loading

Here I am calling a method to get the QuickBlox result asynchronously. When I print the value to the console I can see it, but the application keeps loading and never returns the result.
Server side:
Meteor.methods({
    allquickbloxusers_Methods: function() {
        var params = { login: ["99999"] };
        var asyncCall = QB1.users.get(params, Meteor.bindEnvironment(function(err, QBuser) {
            if (QBuser) {
                return QBuser;
            } else {
                return err;
            }
        }));
        var syncCall = Meteor.wrapAsync(asyncCall);
        var res = syncCall();
        // now you can return the result to client.
        return res;
    }
});
To use Meteor.wrapAsync you want to pass it the actual function, not the result of calling it, like so:
Meteor.methods({
    allquickbloxusers_Methods: function() {
        var params = { login: ["99999"] };
        var syncCall = Meteor.wrapAsync(QB1.users.get);
        var res = syncCall(params);
        // now you can return the result to client.
        return res;
    }
});
Basically wrapAsync gives you back a new function that you call with the parameters of the original function.
Knowing this, you can make the function even more concise:
Meteor.methods({
    allquickbloxusers_Methods: function() {
        var params = { login: ["99999"] };
        return Meteor.wrapAsync(QB1.users.get)(params);
    }
});
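One caveat: if QB1.users.get uses this internally, it can lose its context when the bare function reference is passed around. Meteor.wrapAsync accepts an optional context as its second argument, so a hedged variant would be:

Meteor.methods({
    allquickbloxusers_Methods: function() {
        var params = { login: ["99999"] };
        // pass QB1.users as the context so `this` inside get() still points at it
        return Meteor.wrapAsync(QB1.users.get, QB1.users)(params);
    }
});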

How to return result values from node_redis function

I need to return the replies from this function and use them in another function. I am a newbie to Node.js and I'm trying to figure out a simple solution.
var getKeys = function(key) {
    var addkey = key + '*';
    myClient.keys(addkey, function (err, replies) {
        console.log(replies);
    });
};
Question 2:
Is there a way to use a variable inside the node_redis callback?
Example:
// redis_get -> defined function for getting values
thingsUUID[i] = thingsUUIDKey[i].split("_").pop();

redis_get("key", function(redis_items) {
    console.log(thingsUUID[i]);
});
Inside redis_get, thingsUUID is undefined. I want to concatenate thingsUUID and the result redis_items.
You could add a callback parameter to your getKeys function and pass it through to myClient.keys, like this:
var getKeys = function(key, callback) {
    var addkey = key + '*';
    myClient.keys(addkey, callback);
};

getKeys("EXAMPLE_KEY", function (err, replies) {
    if (err) console.error(err);
    console.log(replies);
});
As myClient.keys requires a callback, it is asynchronous, and you can't return its response directly as a value.
This is a good resource to get an overview about how callbacks work:
https://github.com/maxogden/art-of-node#callbacks
I'm not quite sure what you want to do, but if your variable thingsUUID is defined outside redis_get, it should be accessible inside the callback:
var thingsUUID = [1, 2, 3, 4];

var getKeys = function(key, callback) {
    var addkey = key + '*';
    myClient.keys(addkey, callback);
};

getKeys("EXAMPLE_KEY", function (err, replies) {
    if (err) console.error(err);
    replies.forEach(function(item, index) {
        console.log(item);
        console.log(thingsUUID[index]);
    });
});
If you want to hand the variable thingsUUID over to your own function redis_get, you need to change the signature of redis_get(key, callback) to redis_get(key, thingsUUID, callback).
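For illustration, a hedged sketch of what that changed signature could look like (redis_get here is your own wrapper around myClient.keys, not part of node_redis itself):

function redis_get(key, thingsUUID, callback) {
    myClient.keys(key + '*', function (err, redis_items) {
        // both thingsUUID and the redis result are available here
        callback(err, thingsUUID, redis_items);
    });
}

redis_get('key', thingsUUID, function (err, uuids, redis_items) {
    if (err) return console.error(err);
    console.log(uuids, redis_items);
});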
// using the Node.js >= v8 built-in promisify utility
const { promisify } = require('util');
// forcing the callback-style function to return a promise
const getAsync = promisify(redisClient.get).bind(redisClient);

// note: await must be used inside an async function
const value = await getAsync(key);
console.log('value of redis key', value);
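Applying the same idea to the original getKeys, a sketch assuming the same myClient from the question (newer versions of node_redis return promises natively, in which case promisify isn't needed):

const { promisify } = require('util');
const keysAsync = promisify(myClient.keys).bind(myClient);

async function getKeys(key) {
    // resolves with the array of matching keys
    const replies = await keysAsync(key + '*');
    return replies;
}

// usage
getKeys('EXAMPLE_KEY')
    .then(replies => console.log(replies))
    .catch(err => console.error(err));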

node.js event listener in another source file

I am trying to design a scenario where, when a particular event is triggered, a few listeners perform some task. To keep the code in line with the single responsibility principle, I want to have the listeners in a different source file. I want to know if this is possible using EventEmitter. Does an EventEmitter only work within a single source file?
var events = require('events');
var em = new events.EventEmitter();

exports.saveScheme = function (req, res) {
    var dal = dalFactory.createDAL(constants.SCHEME);
    return Promise.resolve(dal.PromiseSave(req.body))
        .then(function(data) {
            var schemeId = data._id;
            em.addListener('FirstEvent', function (data) {
                console.log('First subscriber: ' + data);
            });
            em.emit('FirstEvent', 'Test event emitter');
        }).catch(function(error) {
            console.log(error);
        });
};
My other source file is
var emitter = require('events').EventEmitter;
var em = new emitter();

// Subscribe FirstEvent
em.on('FirstEvent', function (data) {
    console.log('First subscriber: ' + data);
});
Every EventEmitter object you create is a new instance, so events fired from the first one won't trigger listeners registered on the second. So the answer to your question as written is: no, it's not possible.
However, there are other solutions:
I think the best one is to create a centralized common eventEmitter, like so:
// firstFile.js
var common = require('./common');
var commonEmitter = common.commonEmitter;

exports.saveScheme = function (req, res) {
    var dal = dalFactory.createDAL(constants.SCHEME);
    return Promise.resolve(dal.PromiseSave(req.body))
        .then(function(data) {
            var schemeId = data._id;
            commonEmitter.addListener('FirstEvent', function (data) {
                console.log('First subscriber: ' + data);
            });
            commonEmitter.emit('FirstEvent', 'Test event emitter');
        }).catch(function(error) {
            console.log(error);
        });
};

// secondFile.js
var common = require('./common');
var commonEmitter = common.commonEmitter;

// Subscribe FirstEvent
commonEmitter.on('FirstEvent', function (data) {
    console.log('First subscriber: ' + data);
});

// common.js
var events = require('events');
var em = new events.EventEmitter();
module.exports.commonEmitter = em;
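Since Node caches modules, every file that requires common.js gets the same emitter instance. As a small variation (not in the original answer), you could also export the instance directly and skip the property lookup:

// common.js - export the emitter instance itself
module.exports = new (require('events').EventEmitter)();

// any other file
var commonEmitter = require('./common');
commonEmitter.on('FirstEvent', function (data) {
    console.log('First subscriber: ' + data);
});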
But if you want the source files to "know" each other, you can do something like this:
// firstFile.js
var events = require('events');
var em = new events.EventEmitter();

exports.saveScheme = function (req, res) {
    var dal = dalFactory.createDAL(constants.SCHEME);
    return Promise.resolve(dal.PromiseSave(req.body))
        .then(function(data) {
            var schemeId = data._id;
            em.addListener('FirstEvent', function (data) {
                console.log('First subscriber: ' + data);
            });
            em.emit('FirstEvent', 'Test event emitter');
        }).catch(function(error) {
            console.log(error);
        });
};

exports.emitter = em;

// secondFile.js
var firstFile = require('./firstFile');
var firstFileEmitter = firstFile.emitter;

// Subscribe FirstEvent
firstFileEmitter.on('FirstEvent', function (data) {
    console.log('First subscriber: ' + data);
});
The event listener and emitter can be in two different files. Please see a simple example below:
The first file, let's call it eventEx.js
var EventEmitter = require('events').EventEmitter;
var myEmitter = new EventEmitter();

myEmitter.on('print', (arg) => {
    console.log(arg);
});

exports.myEmitter = myEmitter;
And the second file is as follows:
var emitter = require('./eventEx.js')
var printEmitter = emitter.myEmitter
printEmitter.emit('print', 'how was Dunkirk?')
printEmitter.emit('print', 'Dunkirk was awesome!!')
This is how I use it in my testing, with class semantics.
One caveat to note: the listener must always be registered before the event is emitted, because when an event is emitted, the emitter only invokes the listeners that are registered at that moment.
// emitter.js
// The core module `events` exposes the EventEmitter class, which we use
// indirectly by extending it with our own class
const EventEmitter = require('events');

class EventEmitterClass extends EventEmitter {
    emitterMethod() {
        this.emit('testEventEmitted', { obj: 'testString object' });
    }
}

module.exports = EventEmitterClass; // we export the class itself

// listener.js
// now import emitter.js and we get back a class - EventEmitterClass
const EventEmitterClass = require('./emitter.js'); // give the actual path of the js file
const eventEmitterObj = new EventEmitterClass();

// it is mandatory to register the listener before calling the emitter method
eventEmitterObj.addListener('testEventEmitted', res => {
    console.log('this is the result from the emitted event:', res);
});

// now call the method that emits the event in the class
eventEmitterObj.emitterMethod();
Now run the listener: node listener.js
Apologies if I have explained things in too much detail.
Just another example of a Node.js event listener in another source file, TypeScript style.
// file 1 (whatever.ts)
import EventEmitter from "events";

const MyEvent = new EventEmitter();
...
// somewhere fire event
MyEvent.emit('push', 'message', {
    data: somedata
});
// end of file
export const MyEvents = MyEvent;
then in file 2
// note the .js as eslint will cry
import { MyEvents } from '../whatever.js';
...
MyEvents.on('push', function(event, data) {
    console.log(event);
    console.log(data);
    // do stuff
});

how to solve 'this' problems with node libraries like async and request

I've written a Node script that fetches some data from a REST API (using the request library). It consists of a couple of functions like so:
var data = { /* object to store all data */ };

function getKloutData() {
    request(url, function() { /* store data */ });
}
// and a function for twitter data
Because I want to do some stuff after fetching all the data, I used the async library to run all the fetch functions like so:
async.parallel([ getTwitterData, getKloutData ], function() {
    console.log('done');
});
This all works fine; however, I wanted to put everything inside an object pattern so I could fetch multiple accounts at the same time:
function Fetcher(name) {
    this.userID = '';
    this.user = { /* data */ };
    this.init();
}

Fetcher.prototype.init = function() {
    async.parallel([ this.getTwitterData, this.getKloutData ], function() {
        console.log('done');
    });
};

Fetcher.prototype.getKloutData = function(callback) {
    request(url, function () { /* store data */ });
};
This doesn't work because async and request change the this context. The only way I could get around it is by binding everything I pass through async and request:
Fetcher.prototype.init = function() {
    async.parallel([ this.getTwitterData.bind(this), this.getKloutData.bind(this) ], function() {
        console.log('done');
    });
};

Fetcher.prototype.getKloutData = function(callback) {
    function saveData() {
        /* store data */
    }
    request(url, saveData.bind(this));
};
Am I doing something basic wrong here? I think going back to the standalone script and forking it into child processes creates too much overhead.
You're doing it exactly right.
The alternative is to keep a reference to the object always in context instead of using bind, but that requires some gymnastics:
Fetcher.prototype.init = function() {
    var self = this;
    async.parallel([
        function(done) { return self.getTwitterData(done); },
        function(done) { return self.getKloutData(done); }
    ], function() {
        console.log('done');
    });
};

Fetcher.prototype.getKloutData = function(callback) {
    var self = this;
    function saveData() {
        // store data
        self.blah();
    }
    request(url, saveData);
};
You can also do the binding beforehand:
Fetcher.prototype.bindAll = function() {
    this.getKloutData = this.getKloutData.bind(this);
    this.getTwitterData = this.getTwitterData.bind(this);
};

Fetcher.prototype.init = function() {
    this.bindAll();
    async.parallel([ this.getTwitterData, this.getKloutData ], function() {
        console.log('done');
    });
};
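If you are on a Node version with ES2015 support, a third option (not from the original answers) is to use arrow functions, which keep the lexical this, so no bind or self variable is needed. A minimal sketch, assuming getTwitterData/getKloutData accept the async task callback and that this.user.klout is a placeholder field:

Fetcher.prototype.init = function () {
    async.parallel([
        (done) => this.getTwitterData(done),
        (done) => this.getKloutData(done)
    ], function () {
        console.log('done');
    });
};

Fetcher.prototype.getKloutData = function (callback) {
    request(url, (err, response, body) => {
        // `this` is still the Fetcher instance inside an arrow function
        this.user.klout = body; // placeholder for "store data"
        callback(err);
    });
};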
You can save this into another variable:
var me = this;
Then me is your this.
Instantiate your objects with this function:
function newClass(klass) {
    var obj = new klass;
    // $.map here is jQuery's map, which iterates the object's (and its prototype's) enumerable properties
    $.map(obj, function(value, key) {
        if (typeof value == "function") {
            obj[key] = value.bind(obj);
        }
    });
    return obj;
}
This will automatically bind every function, so you get an object in the usual OOP style, where methods always have their own object as context.
So you instantiate your objects not with:
var obj = new Fetcher();
But:
var obj = newClass(Fetcher);
