Rethinkdb Node.JS .changes() unwanted looping - node.js

i have a a problem with Node JS, and rethinkdb module.
I'm currently developing program for my thesis
this pieces code of db.js:
var r = require("rethinkdb");
var host = "localhost";
var db = "example";
var port = 28015;
class dbs{
connectToDb(callback){
r.connect({
host:host,
port:port,
db:db
},function(err,connection){
return callback(err,connection);
});
}
streamAllData(tableName,callback){
this.connectToDb(function(err,conn){
r.table(tableName).changes().run(conn,function(err,cursor){
if(err){
return callback(true,err);
}
else{
cursor.next(function(err,rows){
return callback(null,rows);
});
}
});
});
}
}
and this pieces of code from server.js
var dbrs = require("./db");
var rdb = new dbrs();
var io = require("socket.io").listen(https.createServer(options,app).listen(port));
io.on("connection",function(socket){
rdb.streamAllData("userlocation",function(err,data){
socket.emit("broadcast:userlocation",data);
});
});
that result is always sending 7 same data . actually mobile phone sending cordinates to server is clean with configured interval.
this unwanted looping is always crashed my browser when im trying to draw driver location to maps.
that is a screenshot from chrome console

You method name streamAllData does not match your usage of cursor.next, which only fetches a single result. Perhaps you meant to use cursor.each instead?
See https://www.rethinkdb.com/api/javascript/next/

Related

Node.js insert variables into loaded Gumroad API

I'm new to node.js, I was creating a script to verify Serial and Product with the Gumroad Api package.
From a script I load two variables from an html file and send them to the node server
The node receives them and they are displayed, but I cannot get them to be read by the gumroad package function.
This is the script in send variables page ( and works )
$.post('http://localhost:3000/node_server_test/', {
serial:'xxxxxx-xxxxxx-xxxxxx-xxxxxx',
product:'product_id'
});
The node.js retrive the variables well.
I report only the part of the code concerned.
app.post('/node_server_test', (req, res) => {
var a = req.body.product;
var b = req.body.serial;
console.log(a);
console.log(b);
// var a = "product_id";
// var b = "xxxxxx-xxxxx-xxxxx-xxxxxx"
gumroad.verifyLicense(a,b)
.then(function(license) {
console.log('Licence Valid');
}, function() {
console.log('Invalid Licence');
});
});
in this mode the data is read and reported correctly in the console, but they are not read by the gumroad.verifyLicense(a,b) function, the serial / product pair is always invalid.
If instead I remove the comments from the variable declarations for the function
var a = "product_id";
var b = "xxxxxx-xxxxx-xxxxx-xxxxxx"
the function returns the correct result.
It's possible to do it? where am i wrong?
Thanks

Opening Maxmind db in Nodejs

I am trying to open maxmind opensource database in my nodejs application. My application recieves a list of ip addressses from a java application. Application then returns the latitude and longitude corresponding to each ip. I have succesfully done this synchronously, but i want to do it asynchronously to make things a little faster. I have written a code for this, but the application gets killed everytime. I am guessing that the reason might be simultaneous opening of the same database(I might be wrong :D). I am posting the code below. Please take a look at it and make some suggestions on where I am going wrong. Thanks!!!
app.post('/endPoint', function(req, res){
var obj = req.body;
var list = [];
var ipList = obj.ipList;
for(var i = 0; i<ipList.length; i++){
var ip = ipList[i];
//console.log(i);
maxmind.open('./GeoLite2-City.mmdb', function(err, cityLookup){
if(err) throw err;
console.log("open database");
var city = cityLookup.get(ip);
if(city!=null){
var cordinates = {'latitude': city.location.latitude, 'longitude': geodata.location.longitude};
//console.log(cordinates);
list.push(cordinates);
}
if(list.length == ipList.length){
res.json({finalMap: list});
}
});
}
});
You should open the database only once, and reuse it.
The easiest solution would be to synchronously open the database at the top of your file:
const maxmind = require('maxmind');
const cityLookup = maxmind.openSync('./GeoLite2-City.mmdb');
Reading it asynchronously wouldn't speed things up a whole lot, and because loading the database is done only once (during app startup), I don't think it's a big deal that it may temporarily block the event loop for a few seconds.
And use the cityLookup function in your request handler:
app.post('/endPoint', function(req, res) {
...
let city = cityLookup.get(ip);
...
});

Nodejs, not waiting for Redis query to complete before continuing with execution

Using Node.js I need to load three files dynamically with a require() function by fetching the file path from Cassandra. From each file I need to fetch data that is in Redis and do some validation before loading another file from Cassandra. The issue here is: before the validation logic executes and provides results the next file's start to get loaded in parallel. The validation result comes after the loading of the second file, which shouldn't happen. The second file loading should wait for the first file validation logic to be complete and must load only if the validation result is a success. Please help me ... How do I pause or wait for Redis to complete the query in node.js???
node.js
"use strict";
var express = require('express');
var cassandra = require('cassandra-driver');
var app = express();
var Promise = require('bluebird');
var redis = Promise.promisifyAll(require('redis'));
var redisClient = redis.createClient(6379, '127.0.0.1');
var client = new cassandra.Client({contactPoints: ['127.0.0.1'], keyspace: 'poc'});
client.execute("SELECT file FROM testqry1", function (err, result) {
if (!err){
if ( result.rows.length > 0 ) {
for(var i=0; i< result.rows.length; i++){
var filePath=result.rows[i].get('file');
var newdat=Promise.promisifyAll(require(filePath));
var res = newdat(redisClient);
console.log('res:::'+res);
if (res=='failed'){
return;
}
}
} else {
console.log("No results");
}
}
});
file1.js
var crypto = require('crypto');
var redisValue='';
module.exports = function(redisclient){
redisclient.hmgetAsync("testdata", "text1").then(function(redisValue){
console.log('value from redis::'+redisValue)
}).then(function(){
var hashedUserID = crypto.createHmac('sha256', 'sample')
.update('helloworld')
.digest('hex');
function disp(value) {
console.log('value::'+value);
}
disp(hashedUserID);
console.log('redisValue::'+redisValue);
if(hashedUserID =='e043e7e68058c8a4cd686db38f01771bd7a04b8bb9a658d3cb40d0be45935094'){
redata='true';
}else{
redata='false';
}
console.log('redata::'+redata)
})
}
file2.js & file3.js as same content
var result1='';
module.exports = function(redisclient){
redisclient.hmget("testdata", "text1" , function(err, redisValue){
console.log('redisValue2 == %s',redisValue);
if(redisValue == 'test value'){
result1 = "success";
}else{
result1="failed";
}
});
return result1;
}
Output :
res:::undefined
res:::
res:::
value from redis::test data here
value::e043e7e68058c8a4cd686db38f01771bd7a04b8bb9a658d3cb40d0be45935094
redisValue::
redata::true
redisValue2 == test data here
redisValue3 == hello world test data
You say that file2/3 are "same content" but they aren't in one critical area. Per Bluebird's documentation for promisifyAll (see http://bluebirdjs.com/docs/api/promise.promisifyall.html), this feature creates an ...Async version of each core function in the Redis client. You call hmgetAsync in your first case, but you only call hmget in your others.
This is important because you're using an async pattern but with a non-async code structure. In file2/3 you set result1 inside an async callback, but then return it below each call before the call could possibly have returned.
You have two choices:
1: You can convert file2/3/etc to a fully traditional pattern by passing in a callback in addition to the redis client:
module.exports = function(redisclient, callback){
Instead of returning result1, you would then call the callback with this value:
if(redisValue == 'test value'){
callback(null, "success");
} else {
callback("failed", null);
}
2: You could convert file2/3/..N to be Promise-based, in which case you do not need to promisifyAll(require(...)) them - you can simply require() them. Such a pattern might look like:
module.exports = function(redisclient){
return redisclient.hmgetAsync("testdata", "text1");
};
This is a much simpler and cleaner option, and if you keep going with it you can see that you could probably even eliminate the require() and simply do the hmgetAsync in file1 with appropriate data returned by Cassandra. But it's hard to know without seeing your specific application needs. In any event, Promise-based patterns are generally much shorter and cleaner, but not always better - there IS a moderate performance overhead for using them. It's your call which way you go - either will work.

Wait for multiple events from different modules and then start the server

I'm building an app in which I have 2 different modules.One of them is the connection to mongodb and the other module is the connection to redis for session management. Each module has 2 events in this case error and connect.
Here is an example from one of the modules:
var sessionStore = new SessionStore(app.get("sessionStore"));
sessionStore.client.on("error", function(err) {
console.error("[SessionStore]:", "connection error:", err.address + ":" + err.port, "for process:", process.pid);
var sessionStoreError = new Error("Session store error");
sessionStoreError.message = "Session store connection error";
sessionStoreError.code = err.code;
sessionStoreError.address = err.address;
sessionStoreError.port = err.port;
app.emit("error", sessionStoreError);
});
sessionStore.client.on("connect", function() {
console.log("[SessionStore]:", "connection established:", app.get("sessionStore").host + ":" + app.get("sessionStore"), "for process:", process.pid);
app.emit("ready");
});
Almost same way happens with mongodb module.
What I would like to achieve (avoiding the pyramid of doom) is something like this but when both redis and mongodb connections from their modules is successful:
app.on("ready", function(){
app.listen(app.get("port"));
});
Keep in mind that I could nest each module inside the other and perhaps require sequentialy the modules inside the connect handlers and then emit a final event to app, but this is not elegant I suppose according to my tastes that is.
Is there any elegant way to wait for 2 events and then start the server ?
bootable provides an initialization layer that you can also use for Express apps. This doesn't strictly provide a solution to your question ("wait for 2 events"), but it does solve the issue of waiting for multiple asynchronous modules before starting the server.
You can add boot phases for MongoDB and Redis. Once all phases are ready, the callback to app.boot() is called (you can check for any initialization errors there) and when everything is okay, the app can start listening for incoming connections.
Here's your session store example turned into a boot phase:
module.exports = function(app) {
app.phase(function(done) {
var sessionStore = new SessionStore(app.get("sessionStore"));
sessionStore.client.once("error", function(err) {
console.error("[SessionStore]:", "connection error:", err.address + ":" + err.port, "for process:", process.pid);
var sessionStoreError = new Error("Session store error");
sessionStoreError.message = "Session store connection error";
sessionStoreError.code = err.code;
sessionStoreError.address = err.address;
sessionStoreError.port = err.port;
return done(sessionStoreError);
});
sessionStore.client.once("connect", function() {
console.log("[SessionStore]:", "connection established:", app.get("sessionStore").host + ":" + app.get("sessionStore"), "for process:", process.pid);
return done();
});
});
};
Your main code would look something like this:
var bootable = require('bootable');
var express = require('express')
var app = bootable(express());
require('./boot/session-store')(app); // loads the module above
require('./boot/mongodb')(app); // a MongoDB boot phase
// This is the equivalent of your `app.on('ready', ...)`
app.boot(function(err) {
if (err) throw err;
app.listen(app.get("port"));
});
From the top of my head, Here is the best solution I could think of:
I think the most "elegant" way is, like you suggested, to call each module with async.waterfall, and when both of them are done, send an event to the app. I think this is a good solution but the only problem with it is that you are losing time by activating the module sequentialy and not in the same time.
To save time, maybe you can use a less "elegant" solution like this:
The redis module will emit an event called "redisReady" and the mongoDb module will emit one called "mongoReady", and in the main module there will be:
app.on("redisReady", function(){
redisModuleReady = true;
if (mongoModuleReady) {
app.listen(app.get("port"));
}
});
app.on("mongoReady", function(){
mongoModuleReady = true;
if (redisModuleReady) {
app.listen(app.get("port"));
}
});
This is much less "pretty" but saves time.
Surely there are more solutions, but this is what I came up with.

What is the proper way to use Socket.IO callbacks within classes in Node.JS?

I'm using Node.JS with Socket.IO and Express. My goal is to create a module that creates a custom Socket.IO+Express server, loaded with all the functions that my application needs. For example, keeping track of the number of clients connected at any given time.
The catch is, I'd like to have the server work as a class, if possible. I've seen other Node.JS modules use classes just fine (including Socket.IO itself) so I assume it shouldn't conflict with Node's module-oriented architecture. The reason why I need it to work with classes is that I want to be able to create multiple instances of the server with ease.
Here's what I have (simplified for brevity):
index.js
var myserver = require("./myserver");
var myserver1 = myserver.createServer();
var myserver2 = myserver.createServer();
myserver1.port = 8080;
myserver2.port = 8081;
myserver1.start();
myserver2.start();
myserver.js
var io = require("socket.io");
var express = require("express");
var path = require("path");
function MyServer()
{
this.port = 8080;
this.expressServer = null;
this.ioServer = null;
this.clientCount = 0;
}
Server.prototype.getClientCount = function()
{
return this.clientCount;
}
Server.prototype.start = function()
{
this.expressServer = express.createServer();
this.ioServer = io.listen(this.expressServer);
this.expressServer.listen(this.port);
this.ioServer.sockets.on(
"connection",
function()
{
this.clientCount++;
console.log(this.clientCount + " clients connected.");
}
);
}
exports.createServer = function() { return new MyServer(); };
The code inside the "connection" callback is incorrect, because the this keyword in Socket.IO's callback refers to the client object that triggered the "connection" event, not to the MyServer object. So is there a way to access the clientCount property from inside the callback?
Create a proper closure by copying this into another a variable (most people like to call it self but some prefer that) and using it in your connection handler:
var self=this;
this.ioServer.sockets.on(
"connection",
function()
{
self.clientCount++;
console.log(self.clientCount + " clients connected.");
}
);

Resources