Async parallel in NodeJS

I'd like to know how to execute async functions. My goal is to upload files and compress them... But it doesn't work, because my files are not yet uploaded when I write the .zip...
So I get an empty .zip file...
var asyncTasks = [];
selectedPhotos.forEach(function(id){
  asyncTasks.push(function(callback){
    var newFileName = pathDir + '/' + id + '.jpg';
    api.media(id, function(err, media, remaining, limit) {
      gm()
        .in('-page', '+0+0')
        .in('./public/images/instabox.jpg')
        .in('-page', '+10+10')
        .in(media.images.thumbnail.url)
        .mosaic()
        .minify()
        .write(newFileName, function (err) {
          if (!err) console.log('done');
          if (err) console.log(err);
        });
    });
    callback();
  });
});

async.parallel(asyncTasks, function(){
  var admZip = new AdmZip();
  var pathDir = './public/uploads/'+reference;
  admZip.addLocalFolder(pathDir);
  var willSendthis = admZip.toBuffer();
  admZip.writeZip('./public/uploads/'+reference+'.zip');
});

You're calling the callback() too early. Move callback(); inside of your .write() callback like so:
.write(newFileName, function (err) {
  if (!err) console.log('done');
  if (err) console.log(err);
  callback(err);
});
When you execute the callback, that signifies that the task is finished. So without the above change, you're basically telling async that you're done immediately.
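For reference, here is a sketch of how each task from the question could look with that change applied (assuming the same api, gm and async setup as in the question); the task now signals completion only after gm has written the file:
selectedPhotos.forEach(function(id){
  asyncTasks.push(function(callback){
    var newFileName = pathDir + '/' + id + '.jpg';
    api.media(id, function(err, media, remaining, limit) {
      if (err) return callback(err); // let async.parallel see API failures
      gm()
        .in('-page', '+0+0')
        .in('./public/images/instabox.jpg')
        .in('-page', '+10+10')
        .in(media.images.thumbnail.url)
        .mosaic()
        .minify()
        .write(newFileName, callback); // fires only once the file exists on disk
    });
  });
});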

Related

Downloads are not complete while creating write stream

I have a function with a callback to make sure the image download is finished.
If I have multiple images to download (around 11), it downloads them all, but the file sizes are not correct, so some are corrupted.
I thought using the callback would make sure the downloads are done completely before moving on.
This is the code I am using:
imageExtractor(imageId, function(){
  zipmaker(imageId, function () {
  });
});

imageIndex = 0;
function imageExtractor(imageId, callback) {
  images.ImageIds.foreach(function (image){
    imageIndex++;
    // Here I call the download image function
    downloadImageFromURLAndSave(imageURL, imageId, category, callback) {
      if(images.length=seriesIndex){
        callback();
      }
    }
  }

function downloadImageFromURLAndSave(imageURL, imageId, category, callback) {
  console.log("Download has started.");
  console.log(imageURL);
  request
    .get(imageURL)
    .pipe(fs.createWriteStream(__dirname + category +
        '/digital/' + imageId)
      .on('finish', function() {
        console.log("Download has finished for " + imageId + " Congratulations.");
        callback();
      }));
}
It seems the function after this (zipmaker) is called before the images are downloaded, even though I have a callback, as you can see.
If I were you, here is how I would structure your code:
index.js:
var fs = require('fs'),
    request = require('request');
var config = require('./test.json');

var download = function(uri, filename, callback){
  request.head(uri, function(err, res, body){
    console.log('content-type:', res.headers['content-type']);
    console.log('content-length:', res.headers['content-length']);
    request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
  });
};

for (var key in config) {
  if (config.hasOwnProperty(key)) {
    download(key, config[key], function(){
      console.log('done');
    });
  }
}
test.json:
{
  "https://www.google.com/images/srpr/logo3w.png": "google.png",
  "http://www.jqueryscript.net/images/Simplest-Responsive-jQuery-Image-Lightbox-Plugin-simple-lightbox.jpg": "panda.jpg"
}
Hope this helps!
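Note that the loop above starts all downloads but does not by itself tell you when every file has finished, which is what the original question needs before zipping. One way to do that (a sketch, assuming the same download() helper and the question's zipmaker()/imageId) is to count completed callbacks:
var pending = Object.keys(config).length;

Object.keys(config).forEach(function (uri) {
  download(uri, config[uri], function () {
    console.log('done:', config[uri]);
    pending--;
    if (pending === 0) {
      // every write stream has emitted 'close', so the files are complete on disk
      zipmaker(imageId, function () {});
    }
  });
});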

"object is not a function" error during MongoDB document insertion from a CSV stream using async.queue

I'm trying to insert MongoDB documents from a CSV stream using async.queue, but I get the following error. I've tried all the remedies given in similar SO posts.
The exact error message is:
C:\Users\admin\node_modules\mongodb\lib\mongo_client.js:406
        throw err
TypeError: object is not a function
    at C:\Users\admin\Desktop\mynodefile.js:13:2
    at C:\Users\admin\node_modules\mongodb\lib\mongo_client.js:403:11
    at process._tickCallback (node.js:355:11)
The Node.js code I used:
var csv = require('csv');
var async = require('async');
var fs = require('fs');
var MongoClient = require('mongodb').MongoClient;

MongoClient.connect('mongodb://localhost:27017', function(err, db) {
  if (err) throw err;
  var collection = db.collection('myCSVs');
  var queue = async.queue(collection.insert.bind(collection), 5);
  csv()
    .from.path('./input.csv', { columns: true })
    .transform(function (data, index, cb) {
      queue.push(data, function (err, res) {
        if (err) return cb(err);
        cb(null, res[0]);
      });
    })
    .on('error', function (err) {
      console.log('ERROR: ' + err.message);
    })
    .on('end', function () {
      queue.drain = function() {
        collection.count(function(err, count) {
          console.log('Number of documents:', count);
          db.close();
        });
      };
    });
});
You haven't mentioned the database name in your MongoClient.connect function call. You can do so like this:
MongoClient.connect('mongodb://localhost:27017/database_name',function(err, db) {
Then you can do:
var collection = db.collection('myCSVs');
if myCSVs is a collection inside database_name.
Or you can also do:
MongoClient.connect('mongodb://localhost:27017', function(err, mongoclient) {
  var db = mongoclient.db('database_name');
  var collection = db.collection('myCSVs');
});
You have to change
var queue = async.queue(collection.insert.bind(collection), 5);
Into:
var q = async.queue(function (task, callback) {
  console.log('hello ' + task.name);
  callback();
}, 2);
In this line:
queue.push(data, function (err, res) {
  if (err) return cb(err);
  cb(null, res[0]);
});
you are calling push with data and with a callback, but that is not implemented in your
var queue = async.queue(collection.insert.bind(collection), 5);
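Putting the two fixes together, a minimal sketch of the connection and queue setup could look like this (assuming a database actually named database_name and keeping the question's collection and concurrency of 5):
MongoClient.connect('mongodb://localhost:27017/database_name', function (err, db) {
  if (err) throw err;
  var collection = db.collection('myCSVs');

  // The worker now has the (task, callback) signature async.queue expects
  // and simply forwards each CSV row to collection.insert.
  var queue = async.queue(function (task, callback) {
    collection.insert(task, callback);
  }, 5);

  // ...the csv() pipeline from the question can push rows onto this queue as before.
});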

Async and parallel functions in nodejs

I get an issue using the parallel() function from the Async module...
The images array is empty...
var asyncTasks = [];
switch(type){
  case 'instafilm':
    var newFileName;
    var images = [];
    selectedPhotos.forEach(function(id){
      newFileName = pathDir + '/' + id + '.jpg';
      asyncTasks.push(function(callback){
        self.api.media(id, function(err, media, remaining, limit) {
          images.push(media.images.thumbnail.url);
        });
      });
    });
    asyncTasks.push( function(callback){
      console.log(images); // empty
    });
    break;
}
console.log(images); // empty
EDIT #1 :
var asyncTasks = [];
var newFileName;
var images = [];

selectedPhotos.forEach(function(id){
  newFileName = pathDir + '/' + id + '.jpg';
  asyncTasks.push(function(callback){
    self.api.media(id, function(err, media, remaining, limit) {
      images.push(media.images.thumbnail.url);
      callback(null);
    });
  });
});

asyncTasks.push( function(callback){
  console.log(images); // before creating .zip I need to write files using images[]
  gm()
    .in('-page', '+0+0')
    .in('./public/images/instafilm.jpg')
    .in('-page', '+10+10')
    .in(images[0])
    .in('-page', '+10+30')
    .in(images[1])
    .in('-page', '+10+60')
    .in(images[2])
    .in('-page', '+10+90')
    .in(images[3])
    .mosaic()
    .minify()
    .write(newFileName, function (err) {
      if (!err) console.log('done');
      if (err) console.log(err);
      callback();
    });
});

async.parallel(asyncTasks, function(){
  // here I write a .zip file
  var admZip = new AdmZip();
  var pathDir = './public/uploads/'+reference;
  admZip.addLocalFolder(pathDir);
  var willSendthis = admZip.toBuffer();
  admZip.writeZip('./public/uploads/'+reference+'.zip');
});
If I understand your snippet right, your callback function is never called in:
self.api.media(id, function(err, media, remaining, limit) {
  images.push(media.images.thumbnail.url);
});
which is a problem.
Another issue is that you call console.log(images) just after executing this snippet (so the async code has not been executed yet) and as part of your parallel call. But parallel, as its name implies (and its docs confirm), runs your tasks in parallel (as much as Node allows it).
So, especially since console.log() is synchronous, your media([...]) call won't have had time to finish before the console call.
If I guess right, you then need to add a callback like this:
function(callback){
  self.api.media(id, function(err, media, remaining, limit) {
    images.push(media.images.thumbnail.url);
    callback(null);
  });
}
And check the contents of images after your async code finishes, like:
async.parallel(asyncTasks, function(){
  console.log(images);
});
Edit:
According to the new code snippet:
The first part of the code looks OK, up to the gm call, which should be done like this:
async.parallel(asyncTasks, function(){
  gm()
    .in('-page', '+0+0')
    .in('./public/images/instafilm.jpg')
    .in('-page', '+10+10')
    .in(images[0])
    .in('-page', '+10+30')
    .in(images[1])
    .in('-page', '+10+60')
    .in(images[2])
    .in('-page', '+10+90')
    .in(images[3])
    .mosaic()
    .minify()
    .write(newFileName, function (err) {
      if (!err) console.log('done');
      if (err) console.log(err);
      // here I write a .zip file
      var admZip = new AdmZip();
      var pathDir = './public/uploads/'+reference;
      admZip.addLocalFolder(pathDir);
      var willSendthis = admZip.toBuffer();
      admZip.writeZip('./public/uploads/'+reference+'.zip');
    });
});
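As a small refinement (a sketch on top of the answer above, not part of the original code), each task could also forward API errors so that async.parallel's final callback can skip the gm/zip step when something failed:
asyncTasks.push(function(callback){
  self.api.media(id, function(err, media, remaining, limit) {
    if (err) return callback(err); // let async.parallel see the failure
    images.push(media.images.thumbnail.url);
    callback(null);
  });
});

async.parallel(asyncTasks, function(err){
  if (err) return console.log(err); // do not build the mosaic or the .zip on failure
  // ...gm().write() and the AdmZip code from above
});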

Node.js : show process while copying files

I wrote a function to copy a directory to another... But there's a problem: I use a callback function to report the copied size. This callback comes too early (before the end of the copy). I think the problem is that the process is asynchronous. Can you help me?
var fs = require('fs');

var copyDir = function copyDir(from, to, callback){
  if(!fs.existsSync(to)){
    fs.mkdirSync(to);
  }
  console.log(from+" ==> "+to);
  var count = 0;
  fs.readdir(from, function(err, files){
    for(var i=0; i<files.length; i++){
      var f = from+"/"+files[i];
      var d = f.replace(from, to);
      console.log(f+" ("+i+")"+ " : "+d);
      if(!fs.existsSync(d)){
        if(!fs.statSync(f).isFile()){
          //fs.mkdirSync(f.replace(from, to));
          count += fs.statSync(f).size;
          console.log(f + " will make an inception!")
          copyDir(f, f.replace(from, to), function(err, cp){ callback(err, cp); });
        }else{
          var size = fs.statSync(f).size;
          copyFile(f, f.replace(from, to), function(err){
            if(err) callback(err, count)
          });
          count += size;
          callback(null, count);
        }
      }
    }
  });
}

function copyFile(source, target, cb) {
  fs.readFile(source, function (err, data) {
    if (err) throw err;
    fs.writeFileSync(target, data, function (err, data){
      if(err) throw err;
      cb(null, fs.statSync(source).size); //This callback comes before the copy end.
    });
  });
}

exports.copyDir = copyDir;
copyDir is called by:
io.sockets.on('connection', function(socket){
  console.log('connection');
  socket.on('startCopy', function(data){
    sizeDir('templates', function(e, r){
      copyDir('templates', 'tmp', function(err, cp){
        console.log("copy % " + Math.round(100*cp/r));
        socket.emit('copy', {prog: Math.round(100*cp/r)});
      });
    });
  });
});
You can rewrite your else branch as follows:
(function() {
  var size = fs.statSync(f).size;
  copyFile(f, f.replace(from, to), function(err){
    if(err) {
      callback(err, count);
      return;
    }
    count += size;
    callback(null, count);
  });
})();
But you have a lot of synchronous functions in your code. You should be aware of all the caveats of this approach. This article may be helpful.
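The early callback in the question's copyFile also comes from passing a callback to fs.writeFileSync, which simply ignores it. As a sketch (same signature as the question's copyFile, but stream based), the size could be reported only once the destination has been fully written:
function copyFile(source, target, cb) {
  var read = fs.createReadStream(source);
  var write = fs.createWriteStream(target);
  read.on('error', cb);
  write.on('error', cb);
  write.on('finish', function () {
    // the destination file is complete at this point
    cb(null, fs.statSync(source).size);
  });
  read.pipe(write);
}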

Move File in ExpressJS/NodeJS

I'm trying to move an uploaded file from /tmp to the home directory using NodeJS/ExpressJS:
fs.rename('/tmp/xxxxx', '/home/user/xxxxx', function(err){
  if (err) res.json(err);
  console.log('done renaming');
});
But it didn't work, and no error was encountered. When the new path is also in /tmp, it works.
I'm using Ubuntu; home is on a different partition. Any fix?
Thanks
Yes, fs.rename does not move a file between two different disks/partitions. This is the correct behaviour; fs.rename provides identical functionality to rename(2) in Linux.
Read the related issue posted here.
To get what you want, you would have to do something like this:
var source = fs.createReadStream('/path/to/source');
var dest = fs.createWriteStream('/path/to/dest');

source.pipe(dest);
source.on('end', function() { /* copied */ });
source.on('error', function(err) { /* error */ });
Another way is to use fs.writeFile. Calling fs.unlink in the callback will remove the temp file from the tmp directory.
var oldPath = req.files.file.path;
var newPath = ...;

fs.readFile(oldPath, function(err, data) {
  fs.writeFile(newPath, data, function(err) {
    fs.unlink(oldPath, function(){
      if(err) throw err;
      res.send("File uploaded to: " + newPath);
    });
  });
});
Updated ES6 solution ready to use with promises and async/await:
function moveFile(from, to) {
  const source = fs.createReadStream(from);
  const dest = fs.createWriteStream(to);

  return new Promise((resolve, reject) => {
    source.on('end', resolve);
    source.on('error', reject);
    source.pipe(dest);
  });
}
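A usage sketch with async/await (note that, like the stream answer above, this copies the data and does not unlink the source by itself):
async function handleUpload() {
  try {
    await moveFile('/tmp/xxxxx', '/home/user/xxxxx');
    console.log('done'); // resolves once the source stream has ended
  } catch (err) {
    console.log(err);
  }
}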
This example is taken from Node.js in Action:
A move() function that renames, if possible, or falls back to copying.
var fs = require('fs');

module.exports = function move (oldPath, newPath, callback) {
  fs.rename(oldPath, newPath, function (err) {
    if (err) {
      if (err.code === 'EXDEV') {
        copy();
      } else {
        callback(err);
      }
      return;
    }
    callback();
  });

  function copy () {
    var readStream = fs.createReadStream(oldPath);
    var writeStream = fs.createWriteStream(newPath);
    readStream.on('error', callback);
    writeStream.on('error', callback);
    readStream.on('close', function () {
      fs.unlink(oldPath, callback);
    });
    readStream.pipe(writeStream);
  }
}
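A quick usage sketch for the original question's paths (assuming the module above is saved as move.js next to the Express handler):
var move = require('./move');

move('/tmp/xxxxx', '/home/user/xxxxx', function (err) {
  if (err) return res.json(err);
  console.log('done moving'); // renamed, or copied and unlinked across partitions
});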
