Using node.js async.series, but it doesn't work

I want to use async to make node.js do things in order: first query two rates from MongoDB, then use those rates to compute two new rates:
async.series([
  function(callback){
    db.collection('heros', function(err, collection){
      if(err){
        console.log(err);
      }
      if(!err){
        console.log("2 collection fetched!");
        collection.findOne({'id': win}, function(err, result){
          if (err) throw err;
          rate1 = result.rate;
          console.log("win-rate:" + rate1);
        });
        collection.findOne({'id': lose}, function(err, result){
          if (err) throw err;
          rate2 = result.rate;
          console.log("lose-rate:" + rate2);
        });
      }
    });
    callback(null);
  },
  function(callback){
    var Ea = 1/(1 + Math.pow(10, (rate2 - rate1)/400));
    var Eb = 1/(1 + Math.pow(10, (rate1 - rate2)/400));
    var ra = rate1 + 16*(1 - Ea);
    var rb = rate2 + 16*(0 - Eb);
    console.log("ra:" + ra);
    console.log("rb:" + rb);
    callback(null);
  },
  function(callback){
    db.collection('heros', function(err, collection){
      if(!err){
        collection.update({'id': win}, {$set: {rate: ra}}, function(err, result){
          if(err) throw err;
          if(!err){
            console.log("update successful");
          }
        });
        collection.update({'id': lose}, {$set: {rate: rb}}, function(err, result){
          if(err) throw err;
          if(!err){
            console.log("update successful");
          }
        });
      }
    });
    callback(null);
  }
]);
But when I run it, it shows this error message:
ReferenceError: ra is not defined
It seems that node.js jumps to the computing or updating step without waiting for the queries to complete.

You're declaring variables like ra inside a function block, so they're scoped to that function and unavailable elsewhere. You'd need to put them somewhere more accessible. For example, you could declare them outside the series call:
var ra;
async.series([ ... ]);
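A minimal sketch of that layout, just showing the structure with the variable names from your code:

// Sketch only: declare the shared values above the series so every step can see them.
var rate1, rate2, ra, rb;

async.series([
  function(callback){ /* query rate1 and rate2 here, then call callback */ },
  function(callback){ /* compute ra and rb from rate1 and rate2, then call callback */ },
  function(callback){ /* write ra and rb back to the collection, then call callback */ }
]);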
Further, when you use series, you should call the callback function only when all of the work for that step has completed.
For example, one task should look like this:
db.collection('heros', function(err, collection){
  if(err){
    console.log(err); // log the error
    callback(err);    // call the async callback with the error
    return;           // stop
  }
  console.log("2 collection fetched!");
  collection.findOne({'id': win}, function(err, result){
    if (err) { callback(err); return; } // again, handle the error
    rate1 = result.rate;                // grab the result
    console.log("win-rate:" + rate1);   // log it
    callback(null, rate1);              // now indicate the step is complete by calling the callback
  });
});
As you've got multiple async function calls within a single task, you may want to split them into multiple tasks, or consider using the parallel function as well. You could even use parallel inside one of the tasks in the series call to handle the two findOne calls that need to be made.
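For example, a rough sketch of that parallel approach for the two findOne calls might look like this (it assumes db, win and lose are in scope, as in your code):

db.collection('heros', function(err, collection){
  if (err) return callback(err);
  async.parallel({
    winRate: function(cb){
      collection.findOne({'id': win}, function(err, result){
        if (err) return cb(err);
        cb(null, result.rate);
      });
    },
    loseRate: function(cb){
      collection.findOne({'id': lose}, function(err, result){
        if (err) return cb(err);
        cb(null, result.rate);
      });
    }
  }, function(err, rates){
    if (err) return callback(err);
    rate1 = rates.winRate;
    rate2 = rates.loseRate;
    callback(null, rates); // only now is this series step finished
  });
});

That way the two lookups run side by side, and the series only moves on once both have returned.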

Related

Callback executed too early when I create an index

My callback method executes too early. createIndexMaxFrame should start after createIndexFrame is done, but it starts while createIndexFrame is still indexing. The console.log("Done") call should also only run when everything is done, but it too fires too early.
createIndexFrame(dbObject, collectionName, function()
{
  createIndexMaxFrame(dbObject, collectionName, function()
  {
    io.emit('message', { text: "Fertig" });
    db.close();
    console.log("DONE");
  });
});

function createIndexFrame(dbObject, collectionName, callback)
{
  dbObject.collection(collectionName).createIndex({frame: 1, temp: -1}, function(err, res) {
    if (err) throw err;
    callback();
  });
}

function createIndexMaxFrame(dbObject, collectionName, callback)
{
  dbObject.collection(collectionName).createIndex({maxFrame: -1}, function(err, res) {
    if (err) throw err;
    callback();
  });
}
Screenshot: https://i.ibb.co/bPzK3Bq/callback.png
The screenshot is a log from Docker and shows the issue. Mongo_1 shows all the MongoDB logs. Fill_db_1 is the program which executes createIndexFrame and createIndexMaxFrame.
As you can see, MongoDB creates the 2 indexes in parallel.
[conn5] is createIndexFrame()
[conn2] is createIndexMaxFrame()
Also, the message "DONE" is already showing, although createIndexMaxFrame is not finished.

How to properly export/require

I'm trying to export one function this way:
exports.query = function(request){
  conn.query(request, function(err, rows, fields){
    if(err) console.log(err);
    return rows[0].id;
  });
}
and using it:
var mysql = require('./mysql');
console.log(mysql.query('SELECT * FROM tablename'));
Calling it this way outputs undefined.
How do I fix this, please?
Note that when I just console.log(rows[0].id) instead of return rows[0].id, it prints 123.
Thanks in advance!
In your example, the output is being returned to the anonymous function of the database query instead of the caller of the module. You can use a callback to return output to the caller of the module.
exports.query = function(request, callback){
  conn.query(request, function(err, rows, fields){
    if (err) {
      callback(err);
    } else {
      callback(null, rows[0].id);
    }
  });
}
Then call it like this:
var mysql = require('./mysql');
mysql.query('SELECT * FROM tablename', function(err, results){
  if (err) {
    console.error(err);
  } else {
    console.log(results);
  }
});
That's a problem of asynchrony.
The conn.query function returns undefined because it finishes its execution before the results are fetched (like almost any I/O-related operation in JS/node).
One possible solution is to provide a callback to your query function.
exports.query = function(request, cb){
  conn.query(request, function(err, rows, fields){
    // some "special" processing
    cb(err, rows, fields);
  });
};
If you're not familiar with async functions, take a look at some articles about them:
http://justinklemm.com/node-js-async-tutorial/
https://www.promisejs.org/
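If you'd rather go the promise route those links describe, a minimal sketch (assuming the same conn object as above) could look like this:

// Sketch only: wrap the callback-style query in a Promise.
exports.query = function(request){
  return new Promise(function(resolve, reject){
    conn.query(request, function(err, rows, fields){
      if (err) return reject(err);
      resolve(rows[0].id);
    });
  });
};

// usage
var mysql = require('./mysql');
mysql.query('SELECT * FROM tablename')
  .then(function(id){ console.log(id); })
  .catch(function(err){ console.error(err); });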

Node, async, and callback on all tasks finished within the iterator function

So I'm performing a task with async.map over a collection; the iterator function performs several asynchronous tasks, and then needs to perform a final task on all the data those asynchronous tasks collected.
A quick pseudo example of this would look like:
var accts = []; // array of accounts fetched from mongodb
async.map(accts, function(acct, callback){
  var likes = 0;
  http.get(acct.facebook, function(err, resp){ /* add fan count to likes */ });
  http.get(acct.twitter, function(err, resp){ /* add followers to likes */ });
  // mongoose Model named artist
  artist.update({_id: acct._id}, {fans: likes}, function(err, acctsUpdate){});
});
My confusion lies in the fact that updating MongoDB will probably happen before both of the async tasks are finished, therefore breaking my application. How would I go about doing this?
I've done something similar using async.waterfall and async.map. Hope this helps
async.waterfall([
  function(callback){
    request.get('/api/fancy/', function(err, r, body) {
      if (err) return callback(err);
      callback(null, body); // pass the fetched accounts to the next step
    });
  },
  function(accts, callback){
    async.map(accts, _insertAcct, function(err, results){
      if (err) return callback(err);
      callback(null, results);
    });

    function _insertAcct(acct, _cb){
      if (!acct)
        return _cb(new Error('No acct data'));
      // save the acct to the db, using mongoskin
      db.collection('acct').save(acct, {upsert: true}, function(err, result){
        if (err) return _cb(err);
        _cb(null, result);
      });
    }
  }
], function(err, results){
  //
});
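To address the original worry about the two HTTP calls finishing before the update, one option is to nest an async.parallel inside the map iterator. This is just a sketch; getFanCount is a hypothetical helper that wraps whichever HTTP client you use and calls back with (err, count), and artist is the mongoose model from the question:

// Sketch only: wait for both counts before updating, inside each iterator.
async.map(accts, function(acct, callback){
  async.parallel([
    function(cb){ getFanCount(acct.facebook, cb); }, // hypothetical helper
    function(cb){ getFanCount(acct.twitter, cb); }
  ], function(err, counts){
    if (err) return callback(err);
    var likes = counts[0] + counts[1];
    artist.update({_id: acct._id}, {fans: likes}, function(err, acctUpdated){
      if (err) return callback(err);
      callback(null, acctUpdated);
    });
  });
}, function(err, results){
  // every account has been updated by the time this runs
});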

Async node.js data flow confusion

Thanks for your help... I'm struggling big time with how to handle this properly. I've moved to async now, having given up on my ability to write the callbacks properly myself. In the snippet below I'm passing a set of random numbers (eachrecord) through to a mongoose call, trying to build a data set from the multiple queries.
My issue is that no matter what I've done for 4 hours, the "newarray" variable is always empty.
Thank you for your help -
async.forEach(arLimit, function(eachrecord, callback){
  newarray = new Array;
  var query = UGC_DB_Model.find({}).skip(eachrecord).limit(-1);
  query.execFind(function (err, data) {
    if (err)
      console.log(err);
    else {
      newarray.push(data);
    }
  });
  callback(null, newarray);
}, function(err, result) {
  if (err) return next(err);
  console.log("(it's empty): " + result);
});
There are several issues with your code:
async.forEach isn't meant to 'generate' results, that's what async.map is for;
you need to call the callback only when execFind is done, and not immediately after calling it;
your newarray is probably not necessary;
So try this instead:
async.map(arLimit, function(eachrecord, callback){
  var query = UGC_DB_Model.find({}).skip(eachrecord).limit(-1);
  query.execFind(function (err, data) {
    if (err)
      callback(err); // pass the error along
    else {
      callback(null, [ data ]);
      // although I think you mean this (because 'data' is probably an array already)
      // callback(null, data);
    }
  });
}, function(err, result) {
  if (err) return next(err);
  console.log("(it's empty): " + result);
});

Process.nextTick or child_process?

I am trying to allow users to export their contact list in CSV format. I am confused about how to run the export_connect_csv() function. Should I put it in a child process or in process.nextTick?
function export_connect_csv(user_id, file_location){
  mysqlPool.getConnection(function(err, connection){
    var csv_row = "Email,First Name,Last Name,Status,Created\n";

    function processRow(row) {
      var csv_row = row.email+','+row.first_name+','+row.last_name+','+row.status+','+row.created+"\n";
      fs.appendFile(file_location, csv_row, function (err) {
        if(err){
          throw err;
        }
      });
    }

    fs.appendFile(file_location, csv_row, function (err) {
      if(err){
        throw err;
      }
      var query = connection.query('SELECT * FROM contacts where user_id = "'+user_id+'"');
      query
        .on('error', function(err) {
          // handle error
        })
        .on('fields', function(fields) {
        })
        .on('result', function(row) {
          processRow(row);
        })
        .on('end', function() {
          // email now
          console.log('done');
        });
    });
  });
}

var exportContacts = function(req, res){
  var user_id = req.params.user_id || 0;
  export_connect_csv(user_id);
  res.json({});
};
You don't need to use either; you can just call the function. All of that code will run asynchronously, both getConnection and fs.appendFile. However, you will run into a conflict if two users try to export at the same time. You have the following options:
1) You pass a unique file_name every time you call that function
2) You keep things exactly as they are and use fs.appendFileSync to make sure the writes don't overlap each other, but that would block you
3) Or, probably the best solution, do what you intended to do with process.nextTick, but use setImmediate and appendFileSync instead, so you can synchronize writes from several users simultaneously (write only a row at a time to avoid blocking for long periods):
setImmediate(function () {
  fs.appendFileSync('filename', JUST_A_SINGLE_ROW);
});
This is because a recursive process.nextTick can starve the event loop and effectively block you (hence the use of setImmediate), and you need to use fs.appendFileSync because two users might write to the same file simultaneously.
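As a rough sketch of that row-at-a-time approach, reusing the names from the question (the rows queue is an assumption about how you'd collect the query's 'result' events):

// Sketch only: append one row per turn of the event loop so a long export
// doesn't block other users; appendFileSync keeps concurrent exports from
// interleaving within a single row.
var rows = []; // push each row here from the query's 'result' events

function writeNextRow() {
  var row = rows.shift();
  if (!row) return; // nothing queued right now
  var csv_row = row.email + ',' + row.first_name + ',' + row.last_name + ','
              + row.status + ',' + row.created + '\n';
  fs.appendFileSync(file_location, csv_row);
  setImmediate(writeNextRow); // yield before handling the next row
}

setImmediate(writeNextRow);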
More on setImmediate vs nextTick:
setImmediate vs. nextTick
More info on appendFile: http://nodejs.org/api/fs.html#fs_fs_appendfile_filename_data_options_callback
