Node.js asynchronous callbacks and recursion - node.js

I would like to do something like this
// Synchronous recursive directory walk.
//
// Returns an array of full paths for every entry beneath `apath`:
// directories are recorded and then descended into; plain files are
// recorded. Entries that are neither (sockets, symlinks, ...) are
// skipped, matching the original's two isDirectory/isFile checks.
//
// The original pushed onto an undeclared global `results` and returned
// nothing; this version keeps the accumulator local and returns it,
// which is backward compatible (the old return value was undefined).
function scan(apath) {
  var results = [];
  var files = fs.readdirSync(apath);
  for (var i = 0; i < files.length; i++) {
    var full = path.join(apath, files[i]);
    var stats = fs.statSync(full);
    if (stats.isDirectory()) {
      results.push(full);
      results = results.concat(scan(full)); // collect the whole subtree
    }
    if (stats.isFile()) {
      results.push(full);
    }
  }
  return results;
}
but asynchronously.
Trying this with asynchronous functions led me to a nightmare with something like this.
// Asynchronous recursive directory walk.
//
// Lists `apath`, stats each entry in parallel, pushes files and
// directories onto the shared `results` array (declared by surrounding
// code), and recurses into subdirectories. Invokes callback(err)
// exactly once: with the first error encountered, or with null when
// every entry — including all nested ones — has been processed.
//
// The original had a syntax error (`function(err, files)){`), ignored
// the readdir error, and its `--counter` bookkeeping neither waited for
// recursive calls nor ever reported completion.
function scan(apath, callback) {
  fs.readdir(apath, function (err, files) {
    if (err) return callback(err);
    var pending = files.length;
    if (pending === 0) return callback(null); // empty dir: done immediately
    var finished = false;
    // Completion gate shared by every entry (and every recursion):
    // fires the outer callback once — on the first error, or when the
    // last pending entry finishes.
    function finish(err) {
      if (finished) return;
      if (err) {
        finished = true;
        return callback(err);
      }
      if (--pending === 0) {
        finished = true;
        callback(null);
      }
    }
    files.forEach(function (file) {
      var newpath = path.join(apath, file);
      fs.stat(newpath, function (err, stat) {
        if (err) return finish(err);
        if (stat.isFile()) {
          results.push(newpath);
          return finish(null);
        }
        if (stat.isDirectory()) {
          results.push(newpath);
          return scan(newpath, finish); // finish runs when the subtree is done
        }
        finish(null); // neither file nor dir: still counts as processed
      });
    });
  });
}
All hell breaks loose in node's stack because things don't happen in logical succession as they do in synchronous methods.

you can try async module, and use like this:
// Serialized recursive walk using async.whilst: entries are stat'ed one
// at a time, files are recorded, directories are recorded and recursed
// into before the loop advances. callback(err) fires once the whole
// subtree is done (err is null/undefined on success).
//
// Fixes over the original: the readdir error was silently ignored
// (files would be undefined and crash the test function), and an entry
// that is neither a file nor a directory never called cb, hanging the
// whilst loop forever.
function scan(apath, callback) {
  fs.readdir(apath, function (err, files) {
    if (err) return callback(err); // was missing: readdir failure was ignored
    var counter = 0;
    async.whilst(
      function () {
        return counter < files.length; // keep looping while entries remain
      },
      function (cb) {
        var file = files[counter++];
        var newpath = path.join(apath, file);
        fs.stat(newpath, function (err, stat) {
          if (err) return cb(err);
          if (stat.isFile()) {
            results.push(newpath);
            return cb(); // asynchronously advance the loop
          }
          if (stat.isDirectory()) {
            results.push(newpath);
            return scan(newpath, cb); // recurse; cb resumes this loop
          }
          cb(); // was missing: sockets/symlinks etc. hung the loop
        });
      },
      function (err) {
        callback(err); // loop over (or failed): report to the caller
      }
    );
  });
}
See the async module's documentation for more about async.whilst.

Related

node.js async/sync with For Loop, Query, and Additional function

Hello, I am having an issue with the following sequence. I need to run multiple queries, each building on the previous one, inside a for loop, and then use the final result once it has been obtained. The problem is that my for loop keeps running past the query callbacks; I also need to pause the code while the findX function is running.
I know this is an async problem but I don't see how I could chain promises, or use the async npm package with needing to loop queries that depend on the result of the previous query. Thanks in advance.
// Stub from the question: stands in for a database lookup.
// NOTE(review): the body is elided by the asker; it reportedly
// "executes another query", so callers should treat it as asynchronous
// — TODO confirm its real signature and return value.
function findX(){
//executes another query
}
// Runs one query per element of requestArr; each query's parentID is
// meant to come from the previous result.
//
// Fixed here: the loop bound was `i <= requestArr.length`, which read
// one element past the end of the array; `break` inside the query
// callback was a SyntaxError (not lexically inside a loop); and
// `var obj` inside the callback shadowed the outer `obj`, so the outer
// one was never assigned.
//
// NOTE(review): the fundamental async-sequencing problem the question
// asks about is NOT solved by these fixes — all queries are still
// issued immediately, before any callback has updated parentID.
function solve(res, connection, info, requestArr, callback){
  var parentID = null;
  var obj = {};
  for (var i = 0; i < requestArr.length; i++) {
    connection.query("SELECT WHERE ", [parentID, requestArr[i]], function(err, results) {
      if (results[0]['x']) {
        // NOTE(review): the original called findX(x) with an undefined
        // `x`; the queried value is the most plausible argument — TODO confirm.
        obj = findX(results[0]['x']);
        return; // `break` is illegal inside a callback; return ends this handler
      } else {
        parentID = results[0]['parentID'];
      }
    });
  }
  // NOTE(review): this point is reached before any query callback runs;
  // obj is still {} here.
  //Do stuff with obj only after the final result has been set in the for loop
}
You can use async.timesSeries.
// Stand-in for a complicated asynchronous factory: invokes the callback
// immediately in node style (err, result) with a synthetic user record
// whose id embeds the numeric argument.
var createUser = function (id, callback) {
  var user = { id: 'user' + id };
  callback(null, user);
};
// async.times runs the iteratee 5 times (n = 0..4) in parallel and
// collects each value handed to next() into the `users` array, in order.
async.times(5, function(n, next) {
createUser(n, function(err, user) {
// forward the node-style (err, result) pair straight to async
next(err, user);
});
}, function(err, users) {
// final callback: `users` holds the 5 created records, or err is set
// if any createUser call failed
});
So, in your example would be something like this:
// Sequential version of the questioner's loop: async.timesSeries runs
// getResults once per element, one at a time, so parentID from each
// query is available to the next.
//
// Fixed here: timesSeries(count, ...) invokes the iteratee with
// n = 0 .. count-1, so the count must be requestArr.length — the
// original `requestArr.length - 1` silently skipped the last element.
// Also, findX was called with an undefined `x`.
var iterations = requestArr.length,
    parentID,
    obj;
var getResults = function (i, callback) {
  connection.query("SELECT WHERE ", [parentID, requestArr[i]], function (err, results) {
    if (results[0]['x']) {
      // NOTE(review): original called findX(x) with undefined x; the
      // queried value is the most plausible argument — TODO confirm.
      obj = findX(results[0]['x']);
      callback('done'); // sentinel "error" stops the series early
    }
    else {
      parentID = results[0]['parentID']; // feed the next iteration
      callback();
    }
  });
};
async.timesSeries(iterations, function (n, next) {
  getResults(n, function (err) {
    next(err);
  });
}, function (err) {
  // err is either a real failure or the 'done' sentinel; obj is set by now.
  // use obj
});

How to use a nested loop in request.head - Node JS

I am trying to download images from Parse and save them to my local machine. I have this piece of code that does the job for me. It works well when there is only one request, but when I put it in a loop, it no longer works correctly.
// Downloads the first five images listed in results_jsonObj.
//
// Fixed here: the original declared filename/uri with `var`, so all
// five request.head callbacks (which run after the loop has finished)
// saw the LAST iteration's values and every stream wrote the same file
// — which is why only one image appeared. `let`/`const` give each
// iteration its own bindings. The original also referenced undefined
// identifiers `item`, `callback`, and `error`.
for (let i = 0; i < 5; i++) {
  console.log(i); // to debug
  const filename = results_jsonObj[i].imageFile.name;
  const uri = results_jsonObj[i].imageFile.url;
  request.head(uri, function (err, res, body) {
    if (err) {
      console.log(err);
      return;
    }
    console.log(i); // to debug: now reports this request's own index
    const stream = request(uri);
    stream.pipe(
      fs.createWriteStream("images/" + filename)
        .on('error', function (writeErr) {
          console.log(writeErr, filename);
          stream.read();
        })
    );
  });
}
Irrespective of the loop condition I have, only one image downloads to the mentioned directory.
Below is the output:
The input is from a Json file and I have the request, fs, parse module included in the node js program.
Any help on how to go about this?
I have got this fixed now. As advised in the comments it was async which helped me do the trick.
// Per-record download using async.forEachOf over the record's
// properties; only the imageFile entry triggers a download.
//
// Fixed here: the original called callback() synchronously after
// request.head returned (before any I/O had finished), so forEachOf
// "completed" immediately; the write-error handler referenced an
// undefined `error` variable; and request.head ran for EVERY key,
// including those for which uri/filename were never assigned.
for (let i = 0; i < 900; i++) {
  async.forEachOf(results_jsonObj[i], function (value, key, callback) {
    if (key !== 'imageFile') {
      return callback(); // nothing to download for this property
    }
    const filename = value.name;
    const uri = value.url;
    request.head(uri, function (err, res, body) {
      if (err) {
        console.log(err);
        return callback(err);
      }
      const stream = request(uri);
      stream.pipe(
        fs.createWriteStream("images/" + filename)
          .on('error', function (writeErr) {
            callback(writeErr);
            stream.read();
          })
          .on('finish', function () {
            callback(); // signal success only after the file is fully written
          })
      );
    });
  }, function (err) {
    if (err) {
      console.log('one of the api failed, the whole thing will fail now');
    }
  });
}

How to make a for statement wait multiple mongoose queries?

What I am currently trying to do is to display all the playlists with the songs in it. To do that I first find every playlists, then I do a for to loop through them all (in the same time I initialize globalArr and put the values then it will be sended as json because it's an API) and the problem is when I do another find in the loop(PlaylistSong.find or Song.find) well since it's asynchronous the find will be made when the for will be over, and I will have 0 results because they will take the value of increment when he will be at his maximum. I heard of async, I even googled but I really don't understand how to put through this code because it's a combination of for loops and async queries...
Thanks for your help.
// Question code: tries to build a nested playlists/songs array, but the
// JSON response is sent synchronously, before any of the nested mongoose
// queries have completed — so globalArr is serialized while still empty.
router.get('/', function(req, res, next) {
Playlist.find(function (err, playlists) {
if (err) return next(err);
/* Loop through every playlists */
var globalArr = [];
for (var increment = 0; increment < playlists.length; ++increment)
{
globalArr[increment] = [];
globalArr[increment]["name"] = playlists[increment].name;
/* Loop through every links between Songs and Playlist */
// NOTE(review): `increment` is a var shared by every callback; by the
// time PlaylistSong.find's callback runs, the loop has finished and
// increment == playlists.length, so the writes below target the wrong
// (nonexistent) slot.
PlaylistSong.find({idPlaylist: playlists[increment]._id}, function (err, songs) {
if (err) return next(err);
for (var songIncrement = 0; songIncrement < songs.length; ++songIncrement) {
{
console.log("increment"+increment);
globalArr[increment][songIncrement] = [];
/* Getting the actual song by his ID */
// NOTE(review): `song` is not in scope at the time of this call — it is
// only the name of the callback's parameter — so this line throws a
// ReferenceError; the id presumably should come from songs[songIncrement].
Song.find({_id: song.idSong}, function (err, song) {
if (err) return next(err);
globalArr[increment][songIncrement]["name"] = songs[songIncrement].name;
globalArr[increment][songIncrement]["artist"] = songs[songIncrement].artist;
globalArr[increment][songIncrement]["picture"] = songs[songIncrement].picture;
globalArr[increment][songIncrement]["price"] = songs[songIncrement].price;
globalArr[increment][songIncrement]["file"] = songs[songIncrement].file;
globalArr[increment][songIncrement]["difficulty"] = songs[songIncrement].difficulty;
globalArr[increment][songIncrement]["downloaded"] = songs[songIncrement].downloaded;
});
}
}});
}
// NOTE(review): runs before the find callbacks above have fired —
// this is the core bug the question is about.
res.contentType('application/json');
res.send(JSON.stringify(globalArr));
});
});
See this question and the accepted answer:
Simplest way to wait some asynchronous tasks complete, in Javascript?
It basically says to use the Async module, push all of your async function calls onto it and then use async.parallel() which gives you a callback when all of the async functions have completed.
I haven't tested it, but something like this seems like it might work:
// Suggested fix: wrap each playlist's work in a task function, queue the
// tasks on `calls`, and let async.parallel run them, sending the JSON
// response only from async.parallel's final callback.
var async = require('async');
// NOTE(review): module-level `calls` is never reset between requests, so
// a second GET re-runs (and appends to) the tasks queued by earlier ones.
var calls = [];
router.get('/', function(req, res, next) {
Playlist.find(function (err, playlists) {
if (err) return next(err);
/* Loop through every playlists */
var globalArr = [];
for (var increment = 0; increment < playlists.length; ++increment)
{
// IIFE captures the current loop index as `i` for the callbacks below.
(function() {
var i = increment;
calls.push(function(callback) {
globalArr[i] = [];
globalArr[i]["name"] = playlists[i].name;
/* Loop through every links between Songs and Playlist */
// NOTE(review): this line uses `increment` (== playlists.length by the
// time the task runs) instead of the captured `i` — likely a bug.
PlaylistSong.find({idPlaylist: playlists[increment]._id}, function (err, songs) {
if (err) return next(err);
for (var songIncrement = 0; songIncrement < songs.length; ++songIncrement) {
{
console.log("increment"+i);
globalArr[i][songIncrement] = [];
/* Getting the actual song by his ID */
// NOTE(review): `song` is not in scope at the time of this call (it is
// only the callback's parameter name), so this throws a ReferenceError;
// the id presumably should come from songs[songIncrement].
Song.find({_id: song.idSong}, function (err, song) {
if (err) return next(err);
globalArr[i][songIncrement]["name"] = songs[songIncrement].name;
globalArr[i][songIncrement]["artist"] = songs[songIncrement].artist;
globalArr[i][songIncrement]["picture"] = songs[songIncrement].picture;
globalArr[i][songIncrement]["price"] = songs[songIncrement].price;
globalArr[i][songIncrement]["file"] = songs[songIncrement].file;
globalArr[i][songIncrement]["difficulty"] = songs[songIncrement].difficulty;
globalArr[i][songIncrement]["downloaded"] = songs[songIncrement].downloaded;
});
}
// NOTE(review): callback() sits inside the for loop, so it fires once
// per song (or never, for an empty playlist) instead of exactly once
// per task — async.parallel's contract is violated here.
callback();
}});
});
})();
}
// Runs every queued task; the response is sent only after all report done.
async.parallel(calls, function(err, result) {
if (err) {
// TODO: Handle error here
}
res.contentType('application/json');
res.send(JSON.stringify(globalArr));
});
});
});
or if you don't want then to execute in parallel, you can use async.series() instead.
See this jsFiddle for a simplified example of your situation... https://jsfiddle.net/bpursley/fj22hf6g/
Yeah, you should use async. I'll explain this in more detail later (son must go to bed...)
// Looks up every song linked to the playlist `id`. Fetches the link
// rows, then resolves each linked Song concurrently; `done` receives
// (err) on failure or (null, songs) once every lookup has finished.
PlaylistSong.statics.findSongsByPlaylistId = function (id, done) {
  PlaylistSong.find({idPlaylist: id}, function (err, songs) {
    if (err) {
      return done(err);
    }
    // One lookup task per link row; async.parallel gathers the results
    // in the same order as the tasks.
    var tasks = songs.map(function (song) {
      return function (callback) {
        Song.find({_id: song.idSong}, callback);
      };
    });
    async.parallel(tasks, done);
  });
};
// GET /: fetch all playlists, resolve each playlist's songs
// concurrently, and return the combined result as JSON. A database
// failure during the song lookups yields an empty 500 response.
router.get('/', function (req, res, next) {
  Playlist.find(function (err, playlists) {
    if (err) return next(err);
    // Build one song-lookup task per playlist, then run them in parallel.
    var tasks = playlists.map(function (playlist) {
      return function (callback) {
        PlaylistSong.findSongsByPlaylistId(playlist._id, callback);
      };
    });
    async.parallel(tasks, function (err, songs) {
      if (err) {
        res.status(500).send();
        return;
      }
      res.contentType('application/json');
      res.send(JSON.stringify(songs));
    });
  });
});

Node.js callbacks and recursion

I don't understand how to call a function recursively in node.js for example:
// Polls getItems until it yields items, retrying every 5 seconds.
//
// Fixed here: the callback follows the node error-first convention.
// The original success path invoked callback(items), putting the items
// in the ERROR slot, so a caller could not distinguish success from
// failure — it must be callback(null, items).
var releaseStock = function (callback) {
  getItems(function (err, items) {
    if (err) {
      return callback(err);
    }
    if (items) {
      return callback(null, items); // success: error slot is null
    }
    // Nothing available yet: schedule another attempt. setTimeout keeps
    // the recursion asynchronous, so the stack never grows.
    setTimeout(function () {
      releaseStock(callback);
    }, 5000);
  });
};
How can i make it work?
I'm not entirely sure what you want to do, but I suspect it is something along the lines of:
// Polls getItems() until it returns a truthy value, then hands the
// items to the callback.
//
// Fixed here: the original retried via synchronous self-recursion, so a
// long run of empty polls grew the call stack without bound and could
// overflow it; an iterative loop is behaviorally equivalent and safe.
// NOTE(review): this assumes getItems is synchronous, as the answer's
// code does — the question's getItems took a callback; TODO confirm.
var releaseStock = function(callback) {
  // get items from somewhere:
  var items = getItems();
  while (!items) {
    // if there are no items, try again (busy-poll, no stack growth):
    items = getItems();
  }
  // if there are items, give them to the callback function:
  return callback(items);
};

How to transform synchronous style code to asynchronous code?

I have some synchronous code that looks like this:
// Uploads each file in turn with the blocking sync_upload, then prints
// a summary and invokes the completion callback.
//
// Fixed here: the original used `for (file in files)`, which iterates
// the array INDICES ("0", "1", ...) — and leaks `file` as an implicit
// global — so sync_upload received index strings instead of the files.
function bulk_upload(files, callback) {
  files.forEach(function (file) {
    sync_upload(file); // blocks till file uploads
  });
  print('Done uploading files');
  callback();
}
I now have to use an asynchronous API async_upload(file, callback) instead of sync_upload(file) to do the same. I have various options but not sure what is the best:
1) Use a sleep after the for-loop - that's a hack as I have to hope my timing is correct
2) Recursively chain my array:
// Sequential async upload: takes files from the END of the array, and
// only recurses for the next one after the current upload completes.
// NOTE: this drains (mutates) the caller's array, as the original
// intended.
//
// Fixed here: JavaScript arrays have no removeLast() method — the
// original threw a TypeError on the first call; pop() is the real
// equivalent (removes and returns the last element).
function bulk_upload(files, callback) {
  if (files.length == 0) {
    print('Done uploading files');
    callback();
  } else {
    async_upload(files.pop(), function() { bulk_upload(files, callback); });
  }
}
This is not only hacky but sub-optimal as I could have uploaded my files in parallel using the new async_upload API but I ended up uploading sequentially.
3) Use a global counter:
// Option 3 from the question — deliberately broken, kept as an
// anti-example. The busy-wait below blocks Node's single thread, so the
// async_upload callbacks that would increment `uploads` can NEVER run:
// the spin never exits. Additional problems: `uploads` is an implicit
// global, and for...in yields the array indices ("0", "1", ...), not
// the file values, so async_upload receives index strings.
function bulk_upload(files, callback) {
uploads = 0
for(file in files) {
async_upload(file, function() { uploads++; });
}
while(uploads < files.length) ; // Empty spin - this is stupid
print('Done uploading files');
callback();
}
4) Slightly better counter (but still awful):
// Counter-based fan-out: starts every upload at once and fires the
// completion callback when the last one reports in.
//
// Fixed here: an empty `files` array never invoked the callback at all
// (no upload ever completes); `for...in` iterated array indices rather
// than the files; and `uploads` was an implicit global.
function bulk_upload(files, callback) {
  var uploads = 0;
  if (files.length === 0) { // was missing: empty input hung forever
    print('Done uploading files');
    return callback();
  }
  files.forEach(function (file) {
    async_upload(file, function () {
      if (++uploads === files.length) { // last one in fires completion
        print('Done uploading files');
        callback();
      }
    });
  });
}
You can use the async module's forEach method to do this:
// Uploads every file via the async module, then reports completion.
//
// Fixed here: the original passed `async_upload(file, callback)` as the
// second argument — i.e. it CALLED async_upload once, immediately, with
// an undefined `file`, and handed its return value to async.forEach.
// async.forEach needs an iterator function (item, done) instead.
function bulk_upload(files, callback) {
  async.forEach(files, function (file, done) {
    async_upload(file, done); // done signals this item's completion
  }, function (err) {
    if (err) {
      console.error('Failed: %s', err);
    } else {
      console.log('Done uploading files');
    }
    callback(err);
  });
}
Further to my comment, it looks like this code will suffice using futures (untested).
// Adapts the callback-style async_upload to the Futures promise
// interface: returns a promise that is smashed (rejected) with the
// upload error, or fulfilled with the upload's data.
function aync_upload_promise(file) {
  var promise = Futures.promise();
  async_upload(file, function (err, data) {
    if (err) {
      // break it
      promise.smash(err);
      return;
    }
    // fulfill it
    promise.fulfill(data);
  });
  return promise;
}
// Kick off one upload per file; each call returns its promise at once,
// so all uploads run concurrently.
var promises = [];
for(var i=0; i<files.length; ++i )
{
promises.push( aync_upload_promise( files[i] ) );
}
// Futures.join waits on the whole set: .when fires once every promise
// has fulfilled, .fail fires if any of them was smashed (rejected).
// NOTE(review): `files`, `callback`, and `print` are assumed to come
// from the surrounding (unseen) code — TODO confirm.
Futures
.join( promises )
.when( function() {
print('Done uploading files');
callback();
} )
.fail( function(err) { print('Failed :(', err); } )

Resources