Mongoose Trying to open unclosed connection (callback hell) - node.js

I want to insert database records in a loop. But Mongoose reports that I did not close the open connection: "Mongoose Trying to open unclosed connection". How can I make this run in sequence? My code is callback hell.
// Express route that kicks off 50 scrape-and-insert jobs.
// BUG (the subject of this question): InsertAXIXA opens a new mongoose
// connection on every call, so this loop triggers the
// "Trying to open unclosed connection" error.
app.get("/dobavit/", function(req, res) {
for(var i=50; i>0; i--)
{
InsertAXIXA("analitika",i,function(data){
});
}
// NOTE(review): responds immediately; the 50 async jobs are still running.
res.end();
});
// Scrapes one listing page from xxx.ru and saves each item to MongoDB.
//   qwerty   - section name used in the URL and stored with each record
//   page     - page number to fetch
//   callback - invoked with the array of parsed metadata objects
function InsertAXIXA(qwerty,page,callback){
// BUG: opens a fresh connection on every invocation; when called in a loop
// this raises "Trying to open unclosed connection". Connect once at app
// startup instead (see the answers below).
mongoose.connect('mongodb://localhost/gazprom')
var parsedResults = [];
var req = request('http://xxx.ru/'+qwerty+"?page="+page, function (error, response, html) {
if (!error && response.statusCode == 200) {
// str = iconv.decode(html, 'utf-8');
var $ = cheerio.load(html);
$('.col-1 .col-first').each(function(i, element){
var buf = $(this);
// Pull the title, preview text and URLs out of the listing markup.
var zagolovok = buf.children(0).children().children().eq(0).text();
var previewText = buf.children(2).children().eq(0).text();
var url = buf.children(0).children().children().eq(0).attr('href');
var picUrl = buf.children(1).children().eq(0).children().children().eq(0).attr('src');
var metadata = {
zagolovok:zagolovok,
previewText:previewText,
url:url,
typeOfnews:qwerty,
picUrl:picUrl,
qwerty:qwerty
};
var news =new News({
zagolovok: zagolovok,
previewText: previewText,
url:url,
picUrl:picUrl,
qwerty:qwerty
// created_at:Date.today()
});
// NOTE(review): save errors are silently ignored here.
news.save(function(err, news,affected){
});
parsedResults.push(metadata);
});
callback(parsedResults);
}
// BUG: closes the connection as soon as the page is parsed, before the
// asynchronous news.save() calls above have completed.
mongoose.connection.close()
// NOTE(review): the closing "}" of InsertAXIXA is missing from this snippet.
});

You shouldn't actually need to open/close your connection on every request (see here for more about that).
Instead, you can just open your connection once when your app starts and then just let it close when the app closes.
If you leave the connection open, you can reuse the connections instead of wasting time/resources establishing a new one every time that function is called.

In my opinion, you are trying to create another connection without closing the current one. So, you might want to use:
createConnection() instead of connect().
In your case, it would look like this:
db = mongoose.createConnection('mongodb://localhost/mydb');

I was getting the same error today. The solution I found is: we should not call the mongoose.connect function in a loop, or anywhere in the code that is executed repeatedly.
so my example is, i was doing mongoose.connect on all request of app.
// Anti-pattern: this opens a new mongoose connection on EVERY request,
// which produces the same "unclosed connection" error.
app.all("*",function(){
mongoose.connect();
})
Your code does something similar, because in the loop you call a function, and in that function you open a connection.
Thanks

Related

Nodejs, not waiting for Redis query to complete before continuing with execution

Using Node.js I need to load three files dynamically with a require() function by fetching the file path from Cassandra. From each file I need to fetch data that is in Redis and do some validation before loading another file from Cassandra. The issue here is: before the validation logic executes and provides results the next file's start to get loaded in parallel. The validation result comes after the loading of the second file, which shouldn't happen. The second file loading should wait for the first file validation logic to be complete and must load only if the validation result is a success. Please help me ... How do I pause or wait for Redis to complete the query in node.js???
node.js
"use strict";
var express = require('express');
var cassandra = require('cassandra-driver');
var app = express();
var Promise = require('bluebird');
var redis = Promise.promisifyAll(require('redis'));
var redisClient = redis.createClient(6379, '127.0.0.1');
var client = new cassandra.Client({contactPoints: ['127.0.0.1'], keyspace: 'poc'});
// Load each module path returned by Cassandra and run it against Redis.
client.execute("SELECT file FROM testqry1", function (err, result) {
if (!err){
if ( result.rows.length > 0 ) {
for(var i=0; i< result.rows.length; i++){
var filePath=result.rows[i].get('file');
var newdat=Promise.promisifyAll(require(filePath));
// BUG (the subject of this question): newdat() only starts async Redis
// work, so res is undefined/'' here - the loop does not wait for each
// file's validation to finish before loading the next one.
var res = newdat(redisClient);
console.log('res:::'+res);
if (res=='failed'){
return;
}
}
} else {
console.log("No results");
}
}
});
file1.js
var crypto = require('crypto');
var redisValue='';
// Reads a value from Redis, then validates a hard-coded HMAC against an
// expected digest. Exported for dynamic require() from the main script.
module.exports = function(redisclient){
redisclient.hmgetAsync("testdata", "text1").then(function(redisValue){
// NOTE(review): this parameter shadows the module-level redisValue above.
console.log('value from redis::'+redisValue)
}).then(function(){
var hashedUserID = crypto.createHmac('sha256', 'sample')
.update('helloworld')
.digest('hex');
function disp(value) {
console.log('value::'+value);
}
disp(hashedUserID);
// Prints the (still empty) module-level redisValue, not the Redis result.
console.log('redisValue::'+redisValue);
if(hashedUserID =='e043e7e68058c8a4cd686db38f01771bd7a04b8bb9a658d3cb40d0be45935094'){
// NOTE(review): redata is never declared - this creates an implicit
// global (and would throw under "use strict").
redata='true';
}else{
redata='false';
}
console.log('redata::'+redata)
})
// NOTE(review): nothing is returned, so callers cannot await this work.
}
file2.js & file3.js have the same content:
var result1='';
// BUG (explained in the answer below): result1 is assigned inside the
// async hmget callback, but the function returns result1 before that
// callback has run - so callers always see the initial ''.
module.exports = function(redisclient){
redisclient.hmget("testdata", "text1" , function(err, redisValue){
console.log('redisValue2 == %s',redisValue);
if(redisValue == 'test value'){
result1 = "success";
}else{
result1="failed";
}
});
return result1;
}
Output :
res:::undefined
res:::
res:::
value from redis::test data here
value::e043e7e68058c8a4cd686db38f01771bd7a04b8bb9a658d3cb40d0be45935094
redisValue::
redata::true
redisValue2 == test data here
redisValue3 == hello world test data
You say that file2/3 are "same content" but they aren't in one critical area. Per Bluebird's documentation for promisifyAll (see http://bluebirdjs.com/docs/api/promise.promisifyall.html), this feature creates an ...Async version of each core function in the Redis client. You call hmgetAsync in your first case, but you only call hmget in your others.
This is important because you're using an async pattern but with a non-async code structure. In file2/3 you set result1 inside an async callback, but then return it below each call before the call could possibly have returned.
You have two choices:
1: You can convert file2/3/etc to a fully traditional pattern by passing in a callback in addition to the redis client:
module.exports = function(redisclient, callback){
Instead of returning result1, you would then call the callback with this value:
if(redisValue == 'test value'){
callback(null, "success");
} else {
callback("failed", null);
}
2: You could convert file2/3/..N to be Promise-based, in which case you do not need to promisifyAll(require(...)) them - you can simply require() them. Such a pattern might look like:
// Kick off the Redis lookup and hand the resulting Promise straight back
// to the caller, who decides how to consume it.
module.exports = (redisclient) => redisclient.hmgetAsync("testdata", "text1");
This is a much simpler and cleaner option, and if you keep going with it you can see that you could probably even eliminate the require() and simply do the hmgetAsync in file1 with appropriate data returned by Cassandra. But it's hard to know without seeing your specific application needs. In any event, Promise-based patterns are generally much shorter and cleaner, but not always better - there IS a moderate performance overhead for using them. It's your call which way you go - either will work.

No blocking operation in MongoJS

I have an operation that calculates something, but I can't use its result, because my program stays in a wait state: it keeps running in my terminal until I press Ctrl+C.
I have a main in nodejs for my program where I need to use my result calculated in a module.
// Client document passed to the gestion module (placeholder values).
var myJSONClient = {
"nombre" : "<nombre_cliente>",
"intervalo" : [0,0]
};
// BUG (the subject of this question): gestion() computes its result inside
// an async mongojs callback, so it returns before that callback fires and
// intervalo is always undefined here.
var intervalo = gestionar.gestion(myJSONClient,vector_intervalo);
console.log("intervalo: "+intervalo); //return undefined
And this is the module
// Saves a client document to MongoDB and computes an interval from
// vector_intervalo. The computed value is only available inside the save
// callback - see the BUG notes below.
var gestion = function(myJSON,vector_intervalo) {
var dburl = 'localhost/mongoapp';
var collection = ['clientes'];
// NOTE(review): opens a new mongojs connection on every call and never
// closes it - this open handle is why the process never exits.
var db = require('mongojs').connect(dburl, collection );
var intervalo_final;
function cliente(nombre, intervalo){
this.nombre = nombre;
this.intervalo = intervalo;
}
var cliente1 = new cliente(myJSON.nombre,myJSON.intervalo);
db.clientes.save(cliente1, function(err, saveCliente){
if (err || !saveCliente) console.log("Client "+cliente1.nombre+" not saved Error: "+err);
else console.log("Client "+saveCliente.nombre+" saved");
intervalo_final = calculate(vector_intervalo);
console.log(intervalo_final); //here I can see the right content of the variable intervalo_final
});
// BUG: these two lines run before the save callback above completes, so
// intervalo_final is still undefined when it is returned.
console.log(intervalo_final); //this is not executed
return intervalo_final;
}
exports.gestion = gestion;
Welcome to the async world! :)
First of all, you aren't doing blocking operations in Node. Actually, networking in Node is fully asynchronous.
The part you state the console.log works it's because the callback function of the db.clientes.save call. That callback states your mongo save has finished.
What asynchronous networking means?
It means that your save will be processed sometime in the future. The script will not wait for the response before continuing. The console.log right after the save call will be executed as soon as it's reached.
As for the "wait state" of your script, that it never ends, you should take a look at this question. There's the answer.

How to use filesystem's createReadStream with Meteor router(NodeJS)

I need to allow the user of my app to download a file with Meteor. Currently what I do is when the user requests to download a file I enter into a "fileRequests" collection in Mongo a document with the file location and a timestamp of the request and return the ID of the newly created request. When the client gets the new ID it imediately goes to mydomain.com/uploads/:id. I then use something like this to intercept the request before Meteor does:
// Connect middleware installed ahead of Meteor's own handlers so that
// GET /uploads/:id requests can be intercepted and streamed from disk.
var connect = Npm.require("connect");
var Fiber = Npm.require("fibers");
var path = Npm.require('path');
var fs = Npm.require("fs");
var mime = Npm.require("mime");
__meteor_bootstrap__.app
.use(connect.query())
.use(connect.bodyParser()) //I add this for file-uploading
.use(function (req, res, next) {
// Wrapped in a Fiber in case the handler does something async
// (e.g. a database lookup).
Fiber(function() {
if(req.method == "GET") {
// get the id here, and stream the file using fs.createReadStream();
}
// NOTE(review): next() runs even when this middleware streams the file
// itself - a response must be settled exactly once (see the last answer).
next();
}).run();
});
I check to make sure the file request was made less than 5 seconds ago, and I immediately delete the request document after I've queried it.
This works, and is secure (enough) I think. No one can make a request without being logged in, and 5 seconds is a pretty small window for someone to hijack the created request URL, but I just don't feel right with my solution. It feels dirty!
So I attempted to use Meteor-Router to accomplish the same thing. That way I can check if they're logged in correctly without doing the 5 second open to the world trickery.
So here's the code I wrote for that:
// Meteor-Router route that streams a stored file back as an attachment.
Meteor.Router.add('/uploads/:id', function(id) {
var path = Npm.require('path');
var fs = Npm.require("fs");
var mime = Npm.require("mime");
var res = this.response;
var file = FileSystem.findOne({ _id: id });
if(typeof file !== "undefined") {
var filename = path.basename(file.filePath);
var filePath = '/var/MeteorDMS/uploads/' + filename;
// NOTE(review): statSync/createReadStream throw if the file was removed
// from disk after the DB record was created.
var stat = fs.statSync(filePath);
res.setHeader('Content-Disposition', 'attachment; filename=' + filename);
res.setHeader('Content-Type', mime.lookup(filePath));
res.setHeader('Content-Length', stat.size);
var filestream = fs.createReadStream(filePath);
filestream.pipe(res);
// BUG (the subject of this question): returning undefined makes the
// router call res.end('') on the already-piped response - see the
// middleware analysis in the answer below.
return;
}
});
This looks great, fits right in with the rest of the code and is easy to read, no hacking involved, BUT! It doesn't work! The browser spins and spins and never quite knows what to do. I have ZERO error messages coming up. I can keep using the app on other tabs. I don't know what it's doing, it never stops "loading". If I restart the server, I get a 0 byte file with all the correct headers, but I don't get the data.
Any help is greatly appreciated!!
EDIT:
After digging around a bit more, I noticed that trying to turn the response object into a JSON object results in a circular structure error.
Now the interesting thing about this is that when I listen to the filestream for the "data" event, and attempt to stringify the response object I don't get that error. But if I attempt to do the same thing in my first solution(listen to "data" and stringify the response) I get the error again.
So using the Meteor-Router solution something is happening to the response object. I also noticed that on the "data" event response.finished is flagged as true.
// Debug hook from the question: dumps the response object once data flows.
filestream.on('data', function(data) {
fs.writeFile('/var/MeteorDMS/afterData', JSON.stringify(res));
});
The Meteor router installs a middleware to do the routing. All Connect middleware either MUST call next() (exactly once) to indicate that the response is not yet settled or MUST settle the response by calling res.end() or by piping to the response. It is not allowed to do both.
I studied the source code of the middleware (see below). We see that we can return false to tell the middleware to call next(). This means we declare that this route did not settle the response and we would like to let other middleware do their work.
Or we can return a template name, a text, an array [status, text] or an array [status, headers, text], and the middleware will settle the response on our behalf by calling res.end() using the data we returned.
However, by piping to the response, we already settled the response. The Meteor router should not call next() nor res.end().
We solved the problem by forking the Meteor router and making a small change. We replaced the else in line 87 (after if (output === false)) by:
else if (typeof(output)!="undefined") {
See the commit with sha 8d8fc23d9c in my fork.
This way return; in the route method will tell the router to do nothing. Of course you already settled the response by piping to it.
Source code of the middleware as in the commit with sha f910a090ae:
// Source of the Meteor router middleware, quoted for reference.
// hook up the serving
__meteor_bootstrap__.app
.use(connect.query()) // <- XXX: we can probably assume accounts did this
.use(this._config.requestParser(this._config.bodyParser))
.use(function(req, res, next) {
// need to wrap in a fiber in case they do something async
// (e.g. in the database)
if(typeof(Fiber)=="undefined") Fiber = Npm.require('fibers');
Fiber(function() {
var output = Meteor.Router.match(req, res);
if (output === false) {
// Route declined: let the next middleware handle the request.
return next();
} else {
// parse out the various type of response we can have
// array can be
// [content], [status, content], [status, headers, content]
if (_.isArray(output)) {
// copy the array so we aren't actually modifying it!
output = output.slice(0);
if (output.length === 3) {
var headers = output.splice(1, 1)[0];
_.each(headers, function(value, key) {
res.setHeader(key, value);
});
}
if (output.length === 2) {
res.statusCode = output.shift();
}
output = output[0];
}
if (_.isNumber(output)) {
res.statusCode = output;
output = '';
}
// NOTE(review): res.end() runs even when output is undefined - this is
// the line the answer patches to support responses settled by piping.
return res.end(output);
}
}).run();
});

ClientJade - Trouble executing jade.render()

I've run into a problem in the clientside code for a Node.js app I'm working on.
The idea behind this is to update the browser immediately as the socket receives an event.
This is the script block on the client side:
script(src='/scripts/jadeTemplate.js')
script(src='/socket.io/socket.io.js')
script(type='text/javascript')
var socket = io.connect();
// Render each incoming object into the news feed via ClientJade.
socket.on('obj', function(obj) {
var newsItem = document.createElement("item");
// BUG (per the answer below): the compiled template expects the key "obj",
// so this should be { obj: obj } rather than { object: obj }.
jade.render(newsItem, 'objTemplate', { object: obj });
$('#newsfeed').prepend(newsItem);
alert(obj);
});
When the alert() is placed before the jade.render(), it alerts, but if inserted after, it doesn't execute (hence, I think it's a problem with the jade.render()).
This is objTemplate.jade, referred to in line 7:
p #{object}
// That's it.
And this is a relevant snippet from the app.js:
// UDP listener that forwards every datagram to connected socket.io clients.
var server = dgram.createSocket('udp4');
server.bind(41234);
server.on('message', function(buf, rinfo) {
isOnline = true;
var message = buf.toString();
io.sockets.emit('obj', message);
});
UPDATE:
Here's a link to /public/scripts/jadeTemplate.js, which IMO is too long of a snippet for a question.
If I need to provide any more snippets or files let me know. :)
Your template doesn't want an attribute object, it wants obj. Try this:
// Fixed handler: pass the data under the key the template actually uses.
socket.on('obj', function(obj) {
var newsItem = document.createElement("item");
jade.render(newsItem, 'objTemplate', { obj: obj }); // changed object to obj
$('#newsfeed').prepend(newsItem);
alert(obj);
});
http://jsfiddle.net/trevordixon/VeYBY/ shows it working. If you change the attribute back to object, you'll get a javascript error in the console.

http.Clientrequest.abort() ends program

I'm having some issues using Node.js as a http client against an existing long polling server. I'm using 'http' and 'events' as requires.
I've created a wrapper object that contains the logic for handling the http.clientrequest. Here's a simplified version of the code. It works exactly as expected. When I call EndMe it aborts the request as anticipated.
var http = require('http');
var events = require('events');

/**
 * Long-poll helper around http.request.
 * Emits 'res_complete' with the full response body each time a request
 * started by getData() finishes, and 'req_error' if the request errors
 * (including the ECONNRESET produced by abort()).
 * @param {string} urlHost - host name to poll
 * @param {string} urlPath - path to request
 */
function lpTest(urlHost,urlPath){
this.options = {
host: urlHost,
port: 80,
path: urlPath,
method: 'GET'
};
// Current in-flight request; set by getData(), aborted by EndMe().
// (The original declared an unused local `var req = {}` here.)
this.req = null;
events.EventEmitter.call(this);
}
lpTest.super_ = events.EventEmitter;
lpTest.prototype = Object.create(events.EventEmitter.prototype, {
constructor: {
value: lpTest,
enumerable: false
}
});

// Issues one GET request and emits 'res_complete' with the body on success.
lpTest.prototype.getData = function getData(){
// Keep a reference for the nested callbacks; their `this` is not the
// lpTest instance (the original emitted on the response object by mistake,
// and had unbalanced parentheses in this function).
var self = this;
this.req = http.request(this.options, function(res){
var httpData = "";
res.on('data', function(chunk){
httpData += chunk;
});
res.on('end', function(){
self.emit('res_complete', httpData);
});
});
// FIX: without an 'error' handler, the socket error raised by abort()
// is an unhandled 'error' event and crashes the whole process - which is
// why EndMe() appeared to end the program.
this.req.on('error', function(err){
self.emit('req_error', err);
});
// http.request sends nothing until the request is explicitly ended.
this.req.end();
}

// Aborts the current in-flight request, if any.
lpTest.prototype.EndMe = function EndMe(){
if (this.req) this.req.abort();
}
module.exports = lpTest;
Now I want to create a bunch of these objects and use them to long poll a bunch of URL's. So I create an object to contain them all, generate each object individually, initiate it, then store it in my containing object. This works a treat, all of the stored long-polling objects fire events and return the data as expected.
var lpObject = require('./lpTest.js');
var objWatchers = {};

// Creates a long-polling watcher for hostURL + hostPath, re-arms it on each
// completed response, and registers it in objWatchers under that key.
function DoSomething(hostURL, hostPath){
var tempLP = new lpObject(hostURL,hostPath);
tempLP.on('res_complete', function(httpData){
console.log(httpData);
this.getData();
});
// FIX: the original read "hosturl" (undeclared - ReferenceError); the key
// must match the lookups below.
objWatchers[hostURL + hostPath] = tempLP;
}
DoSomething('firsturl.com','firstpath');
// FIX: the original was missing the closing quote on 'secondpath'.
DoSomething('secondurl.com','secondpath');
objWatchers['firsturl.com' + 'firstpath'].getData();
objWatchers['secondurl.com' + 'secondpath'].getData();
Now here's where it fails... I want to be able to stop a long-polling object while leaving the rest going. So naturally I try adding:
objWatchers['firsturl.com' + 'firstpath'].EndMe();
But this causes the entire node execution to cease and return me to the command line. All of the remaining long-polling objects, that are happily doing what they're supposed to do, suddenly stop.
Any ideas?
Could it have something to do with the fact that you are only calling getData() when the data is being returned?
Fixed code:
// Fixed version from the answer: start polling immediately instead of only
// inside the 'res_complete' handler (which could otherwise never fire).
function DoSomething(hostURL, hostPath){
var tempLP = new lpObject(hostURL,hostPath);
tempLP.on('res_complete', function(httpData){
console.log(httpData);
});
tempLP.getData();
// FIX: the original read "hosturl", a typo for hostURL (ReferenceError).
objWatchers[hostURL + hostPath] = tempLP;
}
I have seemingly solved this, although I'm not entirely happy with how it works:
// Workaround: defer the abort slightly so it does not race the requests
// started just above; an 'error' handler on the request is the cleaner fix.
var timeout = setTimeout(function(){
objWatchers['firsturl.com' + 'firstpath'].EndMe();
}, 100);
By calling the closing function on the object after a delay I seem to be able to preserve the program execution. Not exactly ideal, but I'll take it! If anyone can offer a better method please feel free to let me know :)

Resources