I'm a total node noob and barely know what I'm doing. I'm trying to execute a series of functions in sequence, one after the other, using the futures library. My code:
var futures = require('futures');
var sequence = futures.sequence();
sequence
    .then(function() {
        console.log("one");
    })
    .then(function() {
        console.log("two");
    })
    .then(function() {
        console.log("three");
    });
I expect my output to be
one
two
three
but the output I get is
one
What am I doing wrong?
Node.js works with callbacks: the futures sequence passes a next callback into each anonymous function, and you have to call it to make futures execute the next function:
var futures = require('futures');
var sequence = futures.sequence();
sequence
    .then(function(next) {
        console.log("one");
        next(null, 1);
    })
    .then(function(next) {
        console.log("two");
        next(null, 2);
    })
    .then(function(next) {
        console.log("three");
        next(null, 3);
    });
futures is moving and changing constantly. Why not use a more robust and popular module like async? It has everything you could possibly need for this kind of operation.
What you're after is async.series: https://github.com/caolan/async#seriestasks-callback
async.series([
    function(callback){
        // do some stuff ...
        callback(null, 'one');
    },
    function(callback){
        // do some more stuff ...
        callback(null, 'two');
    }
],
// optional callback
function(err, results){
    // results is now equal to ['one', 'two']
});
This question is related to an answer to my previous question. There, #robertklep recommended that I use mapLimit() instead of .map(), because .map() can't handle a large series of data, and with that solution everything worked fine. But now that I've restructured my code, neither of the .<fn>Limit() functions runs past the first loop iteration. Am I missing something here?
var proccesBook = function(file, cb) {
    testFile(file, function (epub) {
        if (epub) {
            getEpuData(file, function (data) {
                insertBookInDB(data)
            })
        } else {
            cb(file)
        }
    })
}

async.mapLimit(full_files_path, 10, proccesBook, function(err){
    if(err){
        console.log('Corrupted file', err);
    } else {
        console.log('Processing complete');
    }
})
// ---> only runs for the first 10 series data
Your primary issue is that you don't call cb in the success branch of proccesBook. Your control flow must guarantee that the callback is called exactly once for each worker-function invocation.
Other asides:
You don't seem to need the results, so eachLimit is fine
You only need mapLimit if you need the results of each worker
You need to follow the standard error-first convention when calling the callback. Don't do cb(file), as that will be interpreted as an error and abort the remaining processing.
var proccesBook = function(file, cb) {
    testFile(file, function (epub) {
        if (epub) {
            getEpuData(file, function (data) {
                insertBookInDB(data)
                cb() // This is what you were missing
            })
        } else {
            cb()
        }
    })
}

async.eachLimit(full_files_path, 10, proccesBook, function(err){
    if(err){
        console.log('Corrupted file', err);
    } else {
        console.log('Processing complete');
    }
})
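For contrast, here is a minimal sketch of mapLimit for the case where you do want a result from each worker (the isEpub flag is just an illustrative payload):

async.mapLimit(full_files_path, 10, function (file, cb) {
    testFile(file, function (epub) {
        cb(null, { file: file, isEpub: !!epub }); // the second argument becomes this file's entry in the results
    });
}, function (err, results) {
    // results is an array with one entry per file, in the same order as full_files_path
    console.log(results);
});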
I am learning Node.js; due to the asynchronous nature of Node.js, I am facing an issue:
domain.User.find({userName: new RegExp(findtext, 'i')}).sort('-created').skip(skip).limit(limit)
    .exec(function(err, result) {
        for(var i=0;i<result.length;i++){
            console.log("result is ",result[i].id);
            var camera=null;
            domain.Cameras.count({"userId": result[i].id}, function (err, cameraCount) {
                if(result.length-1==i){
                    configurationHolder.ResponseUtil.responseHandler(res, result, "User List ", false, 200);
                }
            })
        }
    })
I want to use result in the Cameras callback, but it is an empty array there; is there any way to get it?
And since this code is asynchronous, is it possible to make the complete function synchronous?
#jmingov is right. You should make use of the async module to execute parallel requests to get the counts for each user returned in the User.find query.
Here's a flow for demonstration:
var Async = require('async'); //At the top of your js file.
domain.User.find({userName: new RegExp(findtext, 'i')}).sort('-created').skip(skip).limit(limit)
    .exec(function(err, result) {
        var cameraCountFunctions = [];
        result.forEach(function(user) {
            if (user && user.id) {
                console.log("result is ", user.id);
                var camera = null; //What is this for?
                cameraCountFunctions.push(function(callback) {
                    domain.Cameras.count({"userId": user.id}, function (err, cameraCount) {
                        if (err) return callback(err);
                        callback(null, cameraCount);
                    });
                });
            }
        });
        Async.parallel(cameraCountFunctions, function (err, cameraCounts) {
            console.log(err, cameraCounts);
            //cameraCounts is an array with the counts for each user.
            //Evaluate and return the results here.
        });
    });
Always try to program asynchronously when doing Node.js; this is a must, or you'll end up with big performance problems.
Check this module: https://github.com/caolan/async it can help.
Here is the trouble in your code:
domain.Cameras.count({
"userId": result[i].id
}, function(err, cameraCount) {
// the fn() used in the callback has 'cameraCount' as argument so
// mongoose will store the results there.
if (cameraCount.length - 1 == i) { // here is the problem
// result isnt there it should be named 'cameraCount'
configurationHolder.ResponseUtil.responseHandler(res, cameraCount, "User List ", false, 200);
}
});
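Since the async module was recommended above, here is a minimal sketch of the same flow with async.map, assuming domain.Cameras.count follows the usual Node (err, count) callback convention:

var async = require('async');

// Fetch each user's camera count in parallel; async.map collects the
// counts in the same order as the users in 'result'.
async.map(result, function (user, callback) {
    domain.Cameras.count({ "userId": user.id }, callback);
}, function (err, cameraCounts) {
    if (err) { /* handle the error once, here */ return; }
    // cameraCounts[i] is the camera count for result[i]
    configurationHolder.ResponseUtil.responseHandler(res, cameraCounts, "User List ", false, 200);
});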
I have the following script:
var email_list = ['email1#email.com', 'email2#email.com',....'email100#email.com'];
for(i=0;i<email_list.length;i++){
    if(checkEmail(email_list[i])){
        //do processing, save in db and email to email addresses.
    }
}
This code will be blocking in Node.js. How do I make it non-blocking?
You can do this without blocking the event loop at all, by using a recursive loop. This way what you end up with is launching only one database worker per call at a given time. Assuming the database work you were doing was asynchronous, your code didn't really block the event loop, but the for loop still launched a bunch of workers simultaneously, which will tend to clog the event loop (not block it). And you are right in that the loop does block the event loop while it is counting from 0 to whatever the size of your array is.

The following does exactly the same thing, but you only launch one database worker at a time (good), and you never count from 0 to length. Each email is popped off the list after the work on the current one is done, and your global event loop is left to process other things, not email_list.length database requests simultaneously.
var email_list = ['email1#email.com', 'email2#email.com', 'email100#email.com'];

function checkEmailList(emails, emailCallBack, completionCallback) {
    var someDataCollectedOverAllEmails = '';
    function checkEmailAsync(email) {
        db.doSomeDBWorkAsync(email, function (data) {
            someDataCollectedOverAllEmails += data;
            if (emails.length) {
                checkEmailAsync(emails.pop()); //If there are still emails to be checked, check the next one in line
            } else {
                completionCallback(someDataCollectedOverAllEmails); //If not, call the completionCallback
            }
            emailCallBack(data);
        });
    }
    checkEmailAsync(emails.pop());
}

function logIndividualEmailData(data) {
    console.log('Single Email: ' + data);
}

function logGlobalEmailData(data) {
    console.log('All Email Data: ' + data);
}

checkEmailList(email_list, logIndividualEmailData, logGlobalEmailData);
Process.nextTick example
process.nextTick(function () {
    'use strict';
    console.log('printed second');
    while (true);
});

process.nextTick(function () {
    'use strict';
    console.log('never printed');
});

console.log('printed first');
Note, however, that in the example below, despite the fact that loopForever will run forever, it still allows both of our files to be read. If we just had while(true), it would of course block, and one of our files' data would not be printed out.
var fs = require('fs');

var files = ['blah.js', 'file.js'];
for (var i = 0; i < files.length; i++) {
    fs.readFile(files[i], function (err, data) {
        console.log('File data: ' + data);
        function loopForever(loop) { //asynchronously loop forever; pretty cool, but only useful for really specific situations!
            process.nextTick(function () {
                if (loop) {
                    console.log('looping');
                    loopForever(true);
                }
            });
        }
        loopForever(true);
    });
}
If I need to do stuff after the emails all send, I use the async library (docs), which provides some useful functions for control flow.
You will still need to rewrite checkEmail(email) into checkEmail(email, callback) as #S.D. suggests. In checkEmail you will want to call callback after everything is completed. This probably means that you will nest callbacks, calling the second async thing (sending the email) only after the first (db query) has completed successfully.
I also suggest that you follow convention by using the first callback argument as an err parameter. If you callback(null) you are explicitly saying 'there was no error'. #S.D.'s solution suggests instead callback(ok) which is the opposite of convention.
Here is an example showing a couple nested asynchronous functions and the async library.
edit - use async.eachLimit instead of async.each so you don't execute all 100 calls simultaneously
(function main(){
    var emails = ["a#b", "c#d"];
    var async = require('async');

    async.eachLimit(
        emails          // array to iterate across
        ,10             // max simultaneous iterations
        ,checkEmail     // an asynchronous iterator function
        ,function(err){ // executed on any error, or after every item has succeeded
            console.log('Callback of async.eachLimit');
            if(err){
                console.log('Error: '+err)
            } else {
                console.log('All emails succeeded');
            }
        }
    );
    console.log('Code below the async.eachLimit call will continue executing after starting the asynchronous jobs');
})();

function checkEmail(email, callback){
    fetchFromDb(email, function(err, obj){
        if(err){ return callback(err) }
        sendEmail(email, function(err, obj){
            if(err){ return callback(err) }
            console.log('Both fetchFromDb and sendEmail have completed successfully for '+email);
            callback(null);
        });
    });
}

function fetchFromDb(email, callback){
    process.nextTick(function(){ // placeholder, insert real async function here
        callback(null);
    });
}

function sendEmail(email, callback){
    process.nextTick(function(){ // placeholder, insert real async function here
        callback(null);
    });
}
I know Node.js runs asynchronously, so outer functions execute earlier than inner ones. But what is the way to access the notification array outside the for loop? I would like to access all the values in the array at once; is this feasible?
var notification = [];
for(var j = 0; j < 6; j++) {
    getNotification(response[j].sender_id, function(results) { // a function called
        notification[j] = results;
        console.log(notification); // output: correct
    });
}
console.log(notification); // output: [], need notification array values here
EDIT: If you don't want to use third-party libs, here is how to do it in your own code.
/* jshint node:true*/

function getNotifications(responses, callbackToMainProgramLogic) {
    'use strict';
    var results = [];
    function getNotificationAsync(response) {
        getNotification(response.sender_id, function (data) {
            results.push(data);
            if (responses.length) {
                getNotificationAsync(responses.pop()); //If there are still responses, launch another async getNotification.
            } else {
                callbackToMainProgramLogic(results); //If there aren't, we're done, and we return to the main program flow
            }
        });
    }
    getNotificationAsync(responses.pop());
}

getNotifications(someArrayOfResponses, function (dataFromNotifications) {
    console.log('The collected data: ' + JSON.stringify(dataFromNotifications, 0, 4));
});
If you absolutely must, you could do something ridiculous like this. Your logic in loopUntilDataReceived would be waiting for array sizes, not waiting for a non-empty string, but the idea is similar, and you shouldn't be using this anyway! :)
var fs = require('fs');

var fileData = '';
fs.readFile('blah.js', function (err, data) { //Async operation, similar to your issue.
    'use strict';
    fileData = data;
    console.log('The Data: ' + data);
});

function loopUntilDataReceived() {
    'use strict';
    process.nextTick(function () { //A straight while loop would block the event loop, so we do this once per turn of the event loop.
        if (fileData === '') {
            console.log('No Data Yet');
            loopUntilDataReceived();
        } else {
            console.log('Finally: ' + fileData);
        }
    });
}
loopUntilDataReceived();
Did I mention this is ridiculous? Honestly, this is an awful idea, but it may help you understand what is going on, how the Node event loop works, why what you want is not possible, and why the other posts about callbacks and flow-control libraries are the way to go.
First off, you're having a closure issue in your code (please see the details here)
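To see the closure issue in isolation, here is a minimal sketch (setTimeout stands in for any asynchronous call): every callback shares the same j, which has already reached 6 by the time the callbacks run, so you need to wrap the loop body in a function to capture each value:

for (var j = 0; j < 6; j++) {
    setTimeout(function () {
        console.log(j); // prints 6, six times: all callbacks share the same j
    }, 0);
}

for (var j = 0; j < 6; j++) {
    (function (index) { // an immediately-invoked function gives each iteration its own copy
        setTimeout(function () {
            console.log(index); // prints 0 through 5 as intended
        }, 0);
    })(j);
}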
Then, you simply can't have the array values just next to the loop, because the values are not ready at this point.
You need to wait until all 6 of your getNotification calls get resolved. You can do that with the async library. Something like:
var async = require('async');

var notification = [];

function createRequest(index) {
    return function (callback) {
        getNotification(response[index].sender_id, function(results) {
            notification[index] = results;
            callback(null, results); // error-first: null means no error
        });
    };
}

var requests = [];
for (var j = 0; j < 6; j++) {
    requests.push(createRequest(j));
}

async.parallel(requests, function (err, allResults) {
    // the notification array is ready at this point
    // the data is also available in the allResults array
    console.log(notification);
});
Send a callback to the notification loop like this:
var notification = [];
getNotificationArray(function () {
    console.log(notification);
});

function getNotificationArray(callback) {
    var pending = 6;
    for (var j = 0; j < 6; j++) {
        (function (index) { // capture j for each iteration
            getNotification(response[index].sender_id, function (results) { // a function called
                notification[index] = results;
                console.log(notification); // output: correct
                if (--pending === 0) callback(); // fire the callback only after all 6 have completed
            });
        })(j);
    }
}
I am trying to allow users to export their contact list in CSV format. I am confused about how to run the export_connect_csv() function. Should I put it in a child process or in process.nextTick?
function export_connect_csv(user_id, file_location){
    mysqlPool.getConnection(function(err, connection){
        var csv_row = "Email,First Name,Last Name,Status,Created\n";
        function processRow(row) {
            var csv_row = row.email+','+row.first_name+','+row.last_name+','+row.status+','+row.created+"\n";
            fs.appendFile(file_location, csv_row, function (err) {
                if(err){
                    throw err;
                }
            });
        }
        fs.appendFile(file_location, csv_row, function (err) {
            if(err){
                throw err;
            }
            var query = connection.query('SELECT * FROM contacts where user_id = "'+user_id+'"');
            query
                .on('error', function(err) {
                    //handle error
                })
                .on('fields', function(fields) {
                })
                .on('result', function(row) {
                    processRow(row);
                })
                .on('end', function() {
                    //email now
                    console.log('done');
                });
        });
    });
}

var exportContacts = function(req, res){
    var user_id = req.params.user_id || 0;
    export_connect_csv(user_id);
    res.json({});
};
You don't need to use either; you can just call the function. All of that code will run asynchronously, both getConnection and fs.appendFile. However, you will run into a conflict if two users try to export at the same time. You have the following options:
1) You pass a unique file_name every time you call that function (see the sketch at the end of this answer)
2) You keep things exactly as they are and use fs.appendFileSync to make sure the writes don't overlap each other, but that would block you
3) Or, probably the best solution: do what you intended to do with process.nextTick, but instead use setImmediate and appendFileSync to be able to serialize writes from several users simultaneously (write only a row at a time to avoid blocking for long periods):
setImmediate(function () {
    fs.appendFileSync('filename', JUST_A_SINGLE_ROW)
});
This is because a recursive process.nextTick can starve the event loop and effectively block you (hence the use of setImmediate), and you need fs.appendFileSync because two users might write to the same file simultaneously.
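Here is a minimal sketch of the difference (the setTimeout is just a stand-in for any pending I/O):

// Recursive setImmediate yields back to the event loop between iterations,
// so the pending timer gets a chance to fire:
function viaSetImmediate(n) {
    if (n === 0) return;
    setImmediate(function () { viaSetImmediate(n - 1); });
}

setTimeout(function () { console.log('timer fired'); }, 0);
viaSetImmediate(1000); // 'timer fired' prints long before the 1000 iterations finish

// The same loop written with process.nextTick would drain all 1000
// iterations before the timer callback ever runs, starving the event loop.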
More on setImmediate vs nextTick:
setImmediate vs. nextTick
More info on appendFile: http://nodejs.org/api/fs.html#fs_fs_appendfile_filename_data_options_callback
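And for option 1, a minimal sketch of passing a unique file name per export (the naming scheme and the '/tmp/exports' directory are just illustrative choices):

var path = require('path');

// Derive a per-export file path so concurrent exports never touch the same file.
function uniqueExportPath(dir, user_id) {
    return path.join(dir, 'contacts_' + user_id + '_' + Date.now() + '.csv');
}

var exportContacts = function (req, res) {
    var user_id = req.params.user_id || 0;
    export_connect_csv(user_id, uniqueExportPath('/tmp/exports', user_id));
    res.json({});
};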