Node.js callback ordering - node.js

I have been coding in Node.js:
// Question code: ten asynchronous queries fired from a loop.  Node's
// callback model gives no ordering guarantee between the query
// callbacks and the timer below — that is the behavior being asked about.
var sql = ' SELECT 1 AS re';
for (var i = 0; i < 10; i++) {
  connection.query(sql, function (err, rows) {
    if (err) {
      // Roll back the transaction, then rethrow the original error.
      connection.rollback(function (e) { throw err; });
    } else {
      console.log('foo');
    }
  });
}
// FIX: the original line was garbled ("console.log('b }, 50);" — a
// truncated string literal and a syntax error); restored to log 'bar'
// per the question's desired "foo --> bar" ordering.
setTimeout(function () { console.log('bar'); }, 50);
Can I line the output up exactly? Like: foo --> bar

You will need to wrap your code either in async or in a form of a promise.
For Instance:
// Answer code: run the ten queries strictly one after another with
// async.whilst, then log 'bar' once the loop has finished.
var sql = 'SELECT 1 AS re';
var count = 0;
async.whilst(
  // Test: keep looping while fewer than 10 iterations have run.
  function () {
    return count < 10;
  },
  // Iteratee: one query per pass; done() must be called exactly once
  // to advance the loop.
  function (done) {
    count++;
    connection.query(sql, function (err, rows) {
      if (err) {
        // FIX: propagate the query error — the original called done()
        // with no argument, so the failure was silently swallowed and
        // the final callback could never see it.
        connection.rollback(function (e) { done(err); });
      } else {
        console.log('foo');
        done();
      }
    });
  },
  // Final callback: runs after the test fails or an iteration errors.
  function (err) {
    console.log('bar');
  });
Now this will be sequential. Again, if this isn't what you were asking, I'd suggest updating your question so that we have a better idea of what the problem is, or at least what you intend to do.

Related

Node Async.auto Callback is not a function

I've got a problem with my Node.js script and the async library.
I've got an async.auto with two parts. The first part is OK. In the second part I've got another async.auto with two parts which are nearly the same, with some changes in the SQL queries and arrays. The first part here works fine, but in the second one, zutKopf stops with a failure saying the callback function cbzutk is not a function. I've tried to fix it for some days but can't find the cause, because it's written just like the first part, zubKopf.
Hopefully you can help me.
Greets
// Insert pipeline built on async.auto.
//
// FIX for the reported "cbzutk is not a function": in async.auto, a task
// declared with dependencies (['dep', function (...)]) receives the
// results of its dependencies as the FIRST argument and its callback
// SECOND.  zubKopf has no dependencies, so its single parameter really
// was the callback (which is why it "worked"), but zutKopf depends on
// zubKopf, so its first parameter was the results object — not a
// function.  All dependent tasks below now use (results, callback).
//
// The per-auto 'final' pseudo-tasks were also replaced by the proper
// completion callback of async.auto, which receives (err, results)
// once every task has finished.
async.auto({
  one: function (cbone) {
    db.getConnection(function (err, dbs) {
      // Placeholders (?) keep this query safe from SQL injection.
      dbs.query("insert into rezepte (kat_id, rez_name, rez_desc, rez_img) values(?,?,?,?)", [RezeptComp.rezept.kat_id, RezeptComp.rezept.rez_name, RezeptComp.rezept.rez_desc, RezeptComp.rezept.rez_img], function (err, rows) {
        if (err) {
          cbone(err);
          return;
        }
        console.log('query Rezept');
        rezeptId = rows.insertId;
        RezeptComp.rezept.rez_id = rows.insertId;
        dbs.release();
        cbone(null);
      });
    });
  },
  // Dependent task: signature is (results, callback).
  two: ['one', function (results, cbtwo) {
    async.auto({
      zubKopf: function (cbzubk) {
        async.eachSeries(RezeptComp.zubereitung, function (data, next) {
          db.getConnection(function (err, dbzubk) {
            // NOTE(review): zubk_text is concatenated into the SQL string;
            // switch to ? placeholders as in task `one` to avoid SQL injection.
            dbzubk.query("insert into zubereitungKopf (rez_id, zubk_text) values(" + rezeptId + ",'" + data.zubk_text + "')", function (err, rows) {
              if (err) {
                // FIX: fail this iteration through next(); eachSeries
                // forwards the error to its completion callback exactly
                // once.  (The original called the outer callback here,
                // which can fire more than once.)
                next(err);
                return;
              }
              console.log('query zubk');
              zubkId = rows.insertId;
              RezeptComp.zubereitung[zubkd].zubk_id = zubkId;
              dbzubk.release();
              async.eachSeries(data.zubp, function (data2, next2) {
                db.getConnection(function (err, dbzubp) {
                  dbzubp.query("insert into zubereitungPos (zubk_id, zubp_text) values(" + zubkId + ",'" + data2.zubp_text + "')", function (err, rows) {
                    if (err) {
                      next2(err);
                      return;
                    }
                    console.log('query zubp');
                    zubpId = rows.insertId;
                    RezeptComp.zubereitung[zubkd].zubp[zubpd].zubp_id = zubpId;
                    dbzubp.release();
                    zubpd = zubpd + 1;
                    next2(null);
                  });
                });
              }, function (err) {
                if (err) {
                  next(err);
                  return;
                }
                zubkd = zubkd + 1;
                zubpd = 0;
                next(null);
              });
            });
          });
        }, function (err) {
          // FIX: propagate any iteration error (the original always
          // passed null).
          cbzubk(err || null);
        });
      },
      // Dependent task: (results, callback) — this was the actual bug.
      zutKopf: ['zubKopf', function (results, cbzutk) {
        async.eachSeries(RezeptComp.zutaten, function (data3, next3) {
          db.getConnection(function (err, dbzutk) {
            dbzutk.query("insert into zutatenKopf (rez_id, zutk_text) values(" + rezeptId + ",'" + data3.zutk_text + "')", function (err, rows) {
              if (err) {
                next3(err);
                return;
              }
              console.log('query zutk');
              zutkId = rows.insertId;
              RezeptComp.zutaten[zutkd].zutk_id = zutkId;
              dbzutk.release();
              async.eachSeries(data3.zutp, function (data4, next4) {
                db.getConnection(function (err, dbzutp) {
                  dbzutp.query("insert into zutatenPos (zutk_id, zutp_text) values(" + zutkId + ",'" + data4.zutp_text + "')", function (err, rows) {
                    if (err) {
                      next4(err);
                      return;
                    }
                    console.log('query zutp');
                    zutpId = rows.insertId;
                    RezeptComp.zutaten[zutkd].zutp[zutpd].zutp_id = zutpId;
                    dbzutp.release();
                    zutpd = zutpd + 1;
                    next4(null);
                  });
                });
              }, function (err) {
                if (err) {
                  next3(err);
                  return;
                }
                zutkd = zutkd + 1;
                zutpd = 0;
                next3(null);
              });
            });
          });
        }, function (err) {
          cbzutk(err || null);
        });
      }]
    }, function (err, results) {
      // Inner pipeline finished (or failed); hand control back to `two`.
      cbtwo(err || null);
    });
  }]
}, function (err, results) {
  // Outer pipeline done.  (Typo fixed: the original called
  // callback(RezepComp) — missing the 't'.)
  callback(RezeptComp);
});
In async.auto, dependent functions receive the results of their dependencies as the first parameter, and the callback comes second — declare them as two: ['one', function(results, callback)] instead of two: ['one', function(callback)].

node async module: combine parallel with retry

Here's a simple example of the use of async.parallel:
// Demonstration of async.parallel: branch `one` keeps failing until
// fakeTimer has reached 2, while branch `two` succeeds immediately.
var fakeTimer = 0;
async.parallel({
  one: function (callback) {
    if (fakeTimer >= 2) {
      callback(null, 'I am one');
    } else {
      callback(new Error('too soon!'), null);
      fakeTimer++;
    }
  },
  two: function (callback) {
    callback(null, 'I am two');
  }
}, function (err, results) {
  // async.parallel reports the first error it sees; otherwise the
  // collected results of every branch.
  if (err) {
    console.log('failed!');
  } else {
    console.log(results);
  }
});
When this runs, of course it always ends in failure. What I'd like to do is keep retrying until fakeTimer has become large enough that the one function succeeds.
So either the whole async.parallel could be retried e.g. 5 times, or just the one function. I know that there is the async.retry feature, but I just can't get my head around how to combine that with async.parallel to achieve what I want.
I think ideally the whole async.parallel should be retried, so that it works if the error happens in any of the parallel branches, but it would be great to see an example of an overall retry and a per-branch retry.
The following seems to work:
// Retry an entire async.parallel run until every branch succeeds.
var fakeTimer = 0;
var parallelFunctions = {
  one: function (callback) {
    if (fakeTimer < 2) {
      // Fails on the first two attempts, then succeeds.
      callback(new Error('too soon!'), null);
      fakeTimer++;
    } else {
      callback(null, 'I am one');
    }
  },
  two: function (callback) {
    callback(null, 'I am two');
  }
};
// Wrap the parallel run in a single task that async.retry can re-invoke;
// the (err, results) pair is forwarded straight through.
var doThemInParallel = function (callback) {
  async.parallel(parallelFunctions, callback);
};
// FIX: the original set retries = 2 while its own comment said the value
// "must be > 2 to succeed" — attempts 1 and 2 both fail, so a third
// attempt is required for `one` to succeed.
var retries = 3;
async.retry(retries, doThemInParallel, function (err, results) {
  console.log(err, results);
});

Opening eventsources while keeping under the rate limit with Nodejs

My API only allows a maximum of 10 requests every second. I am trying to stay under this rate limit with the async library. I have tried multiple functions but none of them work.
pauseConnect and connectStream open an event stream for each item; let me know if you need to see their code.
async.queue --doesn't wait.
// Serial queue (concurrency 1) that waits 50s before opening each stream.
// FIXES vs. the original: setTimeout was handed the RESULT of
// connectStream(task) — the call ran immediately — and callback() was
// invoked synchronously, so the queue never actually waited between
// tasks.  Both must happen inside the timer.  The for-loop's closing
// brace was also missing.
var cargo = async.queue(function (task, callback) {
  setTimeout(function () {
    connectStream(task);
    callback();
  }, 50000);
}, 1);
for (var j = 0; j < TeamList.length; j++) {
  cargo.push(TeamList[j], function (err) {
  });
}
async.eachLimit --stops at 5 and doesn't progress
// Runs pauseConnect over TeamList, at most 5 items in flight at once.
// NOTE(review): "stops at 5 and doesn't progress" means the first batch
// of 5 never completes — pauseConnect presumably never invokes the
// completion callback async passes it as its second argument, so
// eachLimit waits forever at the concurrency limit.  Verify that
// pauseConnect calls its callback exactly once per item.
async.eachLimit(TeamList, 5, pauseConnect, function(err){
if(err) {console.log(err);}
});
rate-limiter -- runs through all of them without waiting
// NOTE(review): this acquires ONE token, and then — inside that single
// grant — synchronously opens a stream for every team, so the rate
// limiter only gates the first iteration.  To actually throttle, move
// limiter.removeTokens() inside the loop (one token per stream).
limiter.removeTokens(1, function() {
for(var i=0; i< TeamList.length; i++){
connectStream(TeamList[i]);
}
});
async.each-- doesn't wait just runs through all of them
// FIX: the original was missing the comma between the list and the
// iteratee ("async.each(TeamList pauseConnect, ...)") — a syntax error.
// Note also that async.each starts every item immediately; it imposes
// no concurrency limit, which is why it "runs through all of them".
async.each(TeamList, pauseConnect, function (err) {
  if (err) { console.log(err); }
});
You're missing the callback in the async.each, it should be (for example)...
// FIX: comma restored between TeamList and pauseConnect, and the
// completion callback of async.each receives only (err) — the
// original's second `callback` parameter would be undefined, so
// `return callback()` would itself throw a TypeError.
async.each(TeamList, pauseConnect, function (err) {
  if (err) {
    console.log(err);
  }
});
If anyone is curious this worked for me
// Working approach: at most 5 streams in flight, each delayed 5 seconds
// before connecting, so no more than 5 connections start per window.
async.eachLimit(TeamList, 5, function (team, callback) {
  var delayMs = 5000;
  setTimeout(function () {
    connectStream(team);
    callback();
  }, delayMs);
}, function (err) {
  if (err) {
    console.log(err);
  }
});

How to get return value from the function while initialising the object in node js

I am new to Node.js; I have something like this:
get_contacts(data, function(contacts) {
// NOTE(review): get_sent() below has no return statement, so `sent1` is
// always undefined — the real count only arrives later, inside its
// callback, after this object has already been built.  res.end() at the
// bottom also runs before any get_sent callback has fired, so the
// response can never contain the counts.
if (contacts.length) {
var count = contacts.length;
for (var i = 0; i < count; i++) {
result = {
id: contacts[i].id,
name: contacts[i].name,
// sent1 receives get_sent's return value (undefined), not resp.count.
sent1: get_sent(data.userId, contacts[i].id, function(resp) {
result.sent = resp.count;
}),
}
result1[i] = result;
}
output = {
contacts: result1,
}
} else {
output = {
error: "No Contacts.",
}
}
res.writeHead(200, {'content-type': 'text/html'});
res.end(JSON.stringify(output));
});
get_contacts is a callback-based function which returns the contact list. result1 and result are objects. Now the value for sent should come from a function get_sent, and get_sent looks like this:
// Runs a query and hands the result rows to `callback` on success.
// NOTE(review): on error the callback is never invoked (the error is
// only logged), so any caller waiting on it hangs forever — prefer a
// node-style (err, rows) callback as shown in the answer further down.
function get_sent(userId, contactId, callback) {
pool.getConnection(function(err, connection) {
connection.query("my query here", function(err, rows) {
connection.release();
if (!err) {
callback(rows);
} else {
console.log(err)
}
});
});
}
But I'm not getting any value: since Node.js is asynchronous, it does not wait for the function to return a value. I know I'm doing it the wrong way. Please help.
You need to use a callback. In simple words, a callback is a function that you execute after something happens. You should read more about that — get a book about JavaScript, but you can start reading here, for example.
About your case, you could solve it like this
// Assuming that your object `result` is global.
result = {
  id: contacts[i].id,
  name: contacts[i].name,
  sent: -1 // Some default value until the query finishes.
}
// Just to put the code into a function; call it wherever you need it.
function constructObject() {
  get_sent(uId, cId, function (err, total) {
    if (err) {
      console.log("Something was wrong.", err);
      // FIX: bail out on error — the original fell through and assigned
      // result.sent = total with `total` undefined.
      return;
    }
    result.sent = total;
    // Here you have your object completed.
    console.log(result);
  });
}
// Node-style callback: (err, value).
function get_sent(uId, cId, callback) {
  pool.getConnection(function (err, connection) {
    // Note the alias `total` added on the count column.
    connection.query("SELECT count(*) as total FROM table_name", function (err, rows) {
      connection.release();
      if (!err) {
        // Return the result of the query and a null error.
        callback(err, rows[0].total);
      } else {
        console.log(err);
        // Return the error.
        callback(err);
      }
    });
  });
}
// For example you could call the function here.
constructObject();
And it depends on what you are doing exactly, but maybe you need a callback in your constructObject too.

What's the best way to modify every document in a large MongoDB collection without overloading memory?

At the moment I'm running this task:
// Walks the Visit collection in pages of `limit`, copying each visit's
// zone coordinates onto the visit itself, one page per second.
var skip = 0;
var limit = 5;
gulp.task('add coordinates to visits', function (done) {
  (function recurse() {
    Visit.find({})
      .skip(skip)
      .limit(limit)
      .populate('zone')
      .exec(function cb(err, visits) {
        if (err) {
          // FIX: report the failure through gulp's completion callback;
          // a `throw` inside this async callback cannot be caught by
          // gulp and the task would never terminate cleanly.
          return done(err);
        }
        if (visits.length === 0) {
          // FIX: the original returned without calling done(), so gulp
          // never learned the task had finished.
          return done();
        }
        async.each(visits, function iterateEvents(visit, next) {
          if (!visit.zone) {
            return next();
          }
          if (!visit.coordinates.lat || !visit.coordinates.lng) {
            visit.coordinates = {
              lat: visit.zone.geo.coordinates.lat,
              lng: visit.zone.geo.coordinates.lng
            };
          }
          // Save unconditionally when a zone exists (same as the
          // original's fall-through behavior).
          visit.save(next);
        }, function cb(err) {
          if (err) {
            return done(err);
          }
          skip += limit;
          // Throttle: wait a second before fetching the next page.
          setTimeout(recurse, 1000);
        });
      });
  })();
});
But I'm sure there must be a more elegant and optimal method than using skip, limit, and setTimeout. Is there some Mongo or Mongoose method for running update tasks?
Based on our conversation in the comments it seems like Mongoose's querystream might be what you are looking for:
// Stream every visit (zone populated) and backfill missing coordinates
// without loading the whole collection into memory.
var stream = Visits.find().populate('zone').stream();
stream.on('data', function processDoc(visit) {
  var self = this;
  var missingCoords = !visit.coordinates.lat || !visit.coordinates.lng;
  if (!visit.zone || !missingCoords) {
    return;
  }
  // Pause the stream while the write is in flight so documents are not
  // delivered faster than they can be updated.
  self.pause();
  visit.update({
    coordinates: {
      lat: visit.zone.geo.coordinates.lat,
      lng: visit.zone.geo.coordinates.lng
    }
  }, function (err, result) {
    if (err) {
      console.log(err);
    }
    self.resume();
  });
});
stream.on('error', function (err) {
  console.log('error', err);
});
stream.on('close', function () {
  console.log('closed');
});

Resources