Async and parallel functions in nodejs - node.js

I get an issue using the parallel() function in the Async module...
The array images is empty...
// Build one async task per selected photo; the tasks are meant to be run
// later via async.parallel.
var asyncTasks = [];
switch(type){
case 'instafilm':
// NOTE(review): newFileName is shared by every loop iteration, so it ends up
// holding only the LAST photo's path.
var newFileName;
var images = [];
selectedPhotos.forEach(function(id){
newFileName = pathDir + '/' + id + '.jpg';
asyncTasks.push(function(callback){
self.api.media(id, function(err, media, remaining, limit) {
images.push(media.images.thumbnail.url);
// BUG (the subject of this question): callback(...) is never invoked here,
// so async.parallel never learns that this task has completed.
});
});
});
asyncTasks.push( function(callback){
// This task runs in parallel with the media tasks above, not after them.
console.log(images); // empty
});
break;
}
// Synchronous code: runs before any of the async tasks have executed.
console.log(images); // empty
EDIT #1 :
// EDIT #1 version: callbacks are now invoked, but the gm task below is pushed
// into the SAME asyncTasks array, so async.parallel starts it concurrently
// with the media tasks — images[] may still be empty when gm runs.
var asyncTasks = [];
// NOTE(review): newFileName is shared across iterations; only the last
// photo's path survives.
var newFileName;
var images = [];
selectedPhotos.forEach(function(id){
newFileName = pathDir + '/' + id + '.jpg';
asyncTasks.push(function(callback){
self.api.media(id, function(err, media, remaining, limit) {
images.push(media.images.thumbnail.url);
// Correctly signals task completion to async.parallel.
callback(null);
});
});
});
// BUG: this runs in parallel with the tasks above, not after them; the
// answer's fix moves it into async.parallel's completion callback.
asyncTasks.push( function(callback){
console.log(images); // before creating .zip I need to write files using images[]
gm()
.in('-page', '+0+0')
.in('./public/images/instafilm.jpg')
.in('-page', '+10+10')
.in(images[0])
.in('-page', '+10+30')
.in(images[1])
.in('-page', '+10+60')
.in(images[2])
.in('-page', '+10+90')
.in(images[3])
.mosaic()
.minify()
.write(newFileName, function (err) {
if (!err) console.log('done');
if (err) console.log(err);
callback();
});
});
async.parallel(asyncTasks, function(){
// here I write a .zip file
// NOTE(review): the zip is written as soon as all tasks report done; if the
// gm task above ran with an incomplete images[], the zip content is wrong.
var admZip = new AdmZip();
var pathDir = './public/uploads/'+reference;
admZip.addLocalFolder(pathDir);
var willSendthis = admZip.toBuffer();
admZip.writeZip('./public/uploads/'+reference+'.zip');
});

If I understand your snippet right, your callback function is never called in :
self.api.media(id, function(err, media, remaining, limit) {
images.push(media.images.thumbnail.url);
});
which is a problem.
Another issue I can think of is that you call console.log(images) just after executing this snippet (so the async code has not been executed yet) and as part of your parallel call. But parallel, as its name implies (and its doc confirms), runs your tasks in parallel (as much as node allows it).
So, especially since console.log() is synchronous, your media([...]) call wouldn't have time to finish before the console call.
If I guess right, you then need to add a callback like this :
function(callback){
self.api.media(id, function(err, media, remaining, limit) {
images.push(media.images.thumbnail.url);
callback(null);
});
}
And to check images content after your async code finish like :
parallel(asyncTasks,function(){
console.log(images)
})
Edit :
According to new code snippet :
the first part of the code looks OK, up until the gm call, which should be done like this:
// The answer's fix: run gm only inside async.parallel's completion callback,
// i.e. after every media task has pushed its URL into images[].
async.parallel(asyncTasks, function(){
gm()
.in('-page', '+0+0')
.in('./public/images/instafilm.jpg')
.in('-page', '+10+10')
.in(images[0])
.in('-page', '+10+30')
.in(images[1])
.in('-page', '+10+60')
.in(images[2])
.in('-page', '+10+90')
.in(images[3])
.mosaic()
.minify()
// The zip is created in gm's write callback, so it only runs once the
// composite image file actually exists on disk.
.write(newFileName, function (err) {
if (!err) console.log('done');
if (err) console.log(err);
// here I write a .zip file
var admZip = new AdmZip();
var pathDir = './public/uploads/'+reference;
admZip.addLocalFolder(pathDir);
var willSendthis = admZip.toBuffer();
admZip.writeZip('./public/uploads/'+reference+'.zip');
});
});

Related

Downloads are not complete while creating write stream

I have a function with a callback to make sure the image download is finished.
If I have multiple images to be downloaded (around 11), it downloads all of them, but the file sizes are not correct. Therefore some are corrupted.
I thought using the callback will make sure the downloads are done completely before moving on.
This is the code below I am using:
// Asker's code, reproduced as pasted. NOTE(review): it is not valid
// JavaScript as shown — see the BUG notes below.
imageExtractor(imageId,function(){
zipmaker(imageId, function () {
});
});
imageIndex =0;
function imageExtractor(imageId, callback) {
// BUG: Array has no "foreach" method — JavaScript is case-sensitive, this
// must be "forEach".
images.ImageIds.foreach(function (image){
imageIndex++;
// Here I call the download image function
// BUG: a function CALL followed by a block body is a syntax error; this
// looks like a paste of the function's signature instead of a call like
// downloadImageFromURLAndSave(imageURL, imageId, category, callback);
downloadImageFromURLAndSave(imageURL, imageId, category,callback) {
// BUG: "=" is assignment; a comparison ("===") was presumably intended.
// TODO confirm what seriesIndex is — it is not defined in this snippet.
if(images.length=seriesIndex){
callback();
}
}
}
function downloadImageFromURLAndSave(imageURL, imageId, category,callback) {
console.log("Download has started.");
console.log(imageURL);
request
.get(imageURL)
.pipe(fs.createWriteStream(__dirname + category+
'/digital/' + imageId)
.on('finish', function() {
console.log("Download has finished for " + imageId+ " Congratulations.");
// NOTE(review): callback fires once PER download, so the caller's
// continuation runs after the FIRST finished file, not after all of them.
callback();
}));
}
It seems the function after this (zipmaker) is called too early, before the images are downloaded, even though I have a callback as you can see.
If I were you, here is how I would structure your code:
index.js:
// Answer's suggested structure: a small download helper driven by a JSON map
// of { url: filename }.
var fs = require('fs'),
request = require('request');
var config = require('./test.json');
// Issues a HEAD request (to log metadata), then streams the GET response to
// disk and invokes callback when the write stream closes.
var download = function(uri, filename, callback){
request.head(uri, function(err, res, body){
// NOTE(review): err is not checked before using res — this throws if the
// HEAD request fails.
console.log('content-type:', res.headers['content-type']);
console.log('content-length:', res.headers['content-length']);
request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
});
};
// Kicks off every download concurrently; 'done' is logged once per file.
for (var key in config) {
if (config.hasOwnProperty(key)) {
download(key, config[key], function(){
console.log('done');
});
}
}
test.json:
{
"https://www.google.com/images/srpr/logo3w.png": "google.png",
"http://www.jqueryscript.net/images/Simplest-Responsive-jQuery-Image-Lightbox-Plugin-simple-lightbox.jpg": "panda.jpg"
}
Hope this helps!

Async parallel in NodeJS

I'd like to know how to execute async functions; my goal is to upload files and compress them... But it doesn't work, because my files are not yet uploaded when I write the .zip...
So I get an empty .zip file...
// One async task per selected photo: fetch media info, composite it with gm,
// then zip the output folder once all tasks report done.
var asyncTasks = [];
selectedPhotos.forEach(function(id){
asyncTasks.push(function(callback){
var newFileName = pathDir + '/' + id + '.jpg';
api.media(id, function(err, media, remaining, limit) {
gm()
.in('-page', '+0+0')
.in('./public/images/instabox.jpg')
.in('-page', '+10+10')
.in(media.images.thumbnail.url)
.mosaic()
.minify()
.write(newFileName, function (err) {
if (!err) console.log('done');
if (err) console.log(err);
});
});
// BUG (the subject of this question): callback() runs immediately after
// SCHEDULING api.media — before gm has written anything — so async.parallel
// thinks the task is done right away. The answer moves it into .write()'s
// callback.
callback();
});
});
async.parallel(asyncTasks, function(){
// Runs as soon as every task has called callback(); with the bug above,
// that is before any image file exists, hence the empty zip.
var admZip = new AdmZip();
var pathDir = './public/uploads/'+reference;
admZip.addLocalFolder(pathDir);
var willSendthis = admZip.toBuffer();
admZip.writeZip('./public/uploads/'+reference+'.zip');
});
You're calling the callback() too early. Move callback(); inside of your .write() callback like so:
.write(newFileName, function (err) {
if (!err) console.log('done');
if (err) console.log(err);
callback(err);
});
When you execute the callback, that signifies that the task is finished. So without the above change, you're basically telling async that you're done immediately.

Stream buffer to client in Express

I have a request handler to send a file from MongoDB (GridFS) to the client like below, but it uses a data variable, so the content is held in memory. I need to make this work in streaming mode and send the file in chunks to the client. I can't figure out how to pipe the buffer to the response. Look at the second code snippet - it doesn't work, but it shows something like what I need.
Maybe it is useful: Data in GridFS is Base64 encoded, but may be changed if streaming can be more efficient.
In-Memory version
// In-memory version: accumulates the whole (Base64-encoded) file into a
// string, then decodes and sends it in one res.send call.
router.get('/get/:id', function(req,res){
getById(req.params.id, function(err, fileId){
new GridStore(db, fileId, "r").open(function(err, gridStore) {
res.set('Content-Type', gridStore.contentType);
var stream = gridStore.stream(true);
var data = '';
stream.on("data", function(chunk) {
// Concatenating chunks keeps the entire file in memory — the problem
// the question is trying to avoid.
data += chunk;
});
stream.on("end", function() {
// Decode the accumulated Base64 text and send it as binary.
res.send(new Buffer(data, 'base64'));
});
});
});
});
Streaming mode version
// Streaming attempt — does not work as written, see the BUG notes.
router.get('/get/:id', function(req,res){
getById(req.params.id, function(err, fileId){
new GridStore(db, fileId, "r").open(function(err, gridStore) {
res.set('Content-Type', gridStore.contentType);
var stream = gridStore.stream(true);
stream.on("data", function(chunk) {
// BUG: a Buffer is not a stream — it has no .pipe method. Also,
// decoding Base64 chunk-by-chunk corrupts data whenever a chunk
// boundary does not fall on a 4-character Base64 group.
new Buffer(chunk, 'base64').pipe(res);
});
stream.on("end", function() {
res.end();
});
});
});
});
Update
I think I'm close to resolving this. I found this works, but doesn't decode from Base64:
// Pipes the GridFS stream straight to the response; works, but the bytes
// sent are still the Base64 text stored in GridFS, not the decoded binary.
new GridStore(db, fileId, "r").open(function(err, gridStore) {
res.set('Content-Type', gridStore.contentType);
gridStore.stream(true).pipe(res);
});
// Streams a GridFS file to the response via gridfs-stream, answering 404 when
// the file does not exist.
exports.sendFile = function(db, res, fileId) {
var grid = require('gridfs-stream');
var gfs = grid(db, mongoose.mongo);
var on_error = function(){
res.status(404).end();
};
var readstream = gfs.createReadStream({
filename: fileId,
// NOTE(review): 'r' here is the GridFS root collection name, not a file
// mode — verify this matches the collection the files were written to.
root: 'r'
});
readstream.on('error', function(err) {
// NOTE(review): fragile — detects "file not found" by comparing the
// stringified error message; an err.code check would be more robust.
if (('\'' + err + '\'') === '\'Error: does not exist\'') {
return on_error && on_error(err);
}
throw err;
});
return readstream.pipe(res);
}
I found a solution, but think that can be better. I use base64-stream module to decode Base64 stream. Solution below:
// Accepted approach: insert base64-stream's decode() transform between the
// GridFS read stream and the response, so chunks are decoded on the fly
// without buffering the whole file.
router.get('/get/:id', function(req,res){
getById(req.params.id, function(err, fileId){
new GridStore(db, fileId, "r").open(function(err, gridStore) {
res.set('Content-Type', gridStore.contentType);
gridStore.stream(true).pipe(base64.decode()).pipe(res);
});
});
});
stream.on("data", function(chunk) {
res.send(chunk.toString('utf8'));
});

Move File in ExpressJS/NodeJS

I'm trying to move uploaded file from /tmp to home directory using NodeJS/ExpressJS:
// Attempted move across partitions; fs.rename cannot cross filesystem
// boundaries (it fails with EXDEV, as the answer explains).
fs.rename('/tmp/xxxxx', '/home/user/xxxxx', function(err){
// NOTE(review): no `return` after res.json(err), so 'done renaming' is
// logged even when the rename failed.
if (err) res.json(err);
console.log('done renaming');
});
But it didn't work and no error was encountered. But when the new path is also in /tmp, it works.
I'm using Ubuntu; home is on a different partition. Any fix?
Thanks
Yes, fs.rename does not move a file between two different disks/partitions. This is the correct behaviour. fs.rename provides identical functionality to rename(2) in Linux.
Read the related issue posted here.
To get what you want, you would have to do something like this:
// Cross-device alternative: copy the file contents through a stream pipe.
var source = fs.createReadStream('/path/to/source');
var dest = fs.createWriteStream('/path/to/dest');
source.pipe(dest);
// NOTE(review): 'end' fires when the SOURCE has been fully read; the
// destination may not have flushed yet — listening for dest 'finish' is the
// safer completion signal. Destination errors are also unhandled here.
source.on('end', function() { /* copied */ });
source.on('error', function(err) { /* error */ });
Another way is to use fs.writeFile. fs.unlink in callback will remove the temp file from tmp directory.
// Alternative: read the whole file into memory, write it to the new path,
// then unlink the original. Simple, but buffers the entire file in RAM.
var oldPath = req.files.file.path;
var newPath = ...;
fs.readFile(oldPath , function(err, data) {
// NOTE(review): neither readFile's nor writeFile's err is checked before
// proceeding; a failed write still unlinks the original.
fs.writeFile(newPath, data, function(err) {
fs.unlink(oldPath, function(){
// NOTE(review): this callback takes no parameter, so `err` here is
// writeFile's error from the enclosing scope — unlink's own failure is
// silently ignored.
if(err) throw err;
res.send("File uploaded to: " + newPath);
});
});
});
Updated ES6 solution ready to use with promises and async/await:
// Copies `from` to `to` through a stream pipe (works across devices and
// partitions, unlike fs.rename). Returns a Promise that resolves once the
// destination file has been fully flushed, and rejects on a read OR write
// error.
// NOTE(review): despite the name, the source file is NOT removed — callers
// needing a true "move" must unlink `from` themselves.
function moveFile(from, to) {
  const source = fs.createReadStream(from);
  const dest = fs.createWriteStream(to);
  return new Promise((resolve, reject) => {
    // Resolve on the DESTINATION's 'finish' event: the source's 'end' fires
    // when reading completes, which can be before the data has actually been
    // written out — awaiting on 'end' can hand back a truncated file.
    dest.on('finish', resolve);
    dest.on('error', reject);
    source.on('error', reject);
    source.pipe(dest);
  });
}
This example taken from: Node.js in Action
A move() function that renames, if possible, or falls back to copying
// A move() that renames when possible and falls back to copy+unlink when the
// paths are on different filesystems (rename fails with EXDEV).
var fs = require('fs');
module.exports = function move (oldPath, newPath, callback) {
fs.rename(oldPath, newPath, function (err) {
if (err) {
// EXDEV: cross-device link — rename can't cross partitions, so copy.
if (err.code === 'EXDEV') {
copy();
} else {
callback(err);
}
return;
}
callback();
});
function copy () {
var readStream = fs.createReadStream(oldPath);
var writeStream = fs.createWriteStream(newPath);
readStream.on('error', callback);
writeStream.on('error', callback);
// NOTE(review): unlink is triggered when the READ side closes; the write
// side may not have flushed yet — waiting for writeStream 'finish' would be
// the safer completion signal.
readStream.on('close', function () {
fs.unlink(oldPath, callback);
});
readStream.pipe(writeStream);
}
}

Store Stream in GridFS using Node.js

I'd like to store a Stream in a GridFS. However it doesn't work as expected (i.e. no console log).
// Asker's code: write an arbitrary stream into a GridFS file chunk by chunk.
var mongo = require("mongodb");
var db = ...
db.open(function (err, db) {
var stream = ...
var name = 'foobar';
// NOTE(review): `new mongo.ObjectID` without parentheses still invokes the
// constructor in JavaScript, though `new mongo.ObjectID()` is clearer.
var gs = new mongo.GridStore(db, new mongo.ObjectID, name, "w");
gs.open(function (err, gs) {
// NOTE(review): err is never checked — if open failed, the handlers below
// silently do nothing, which would explain the missing console log.
stream.on("data", function (data) {
gs.write(data, function (err, gs) {
if (err) {
// ...
}
});
});
stream.on("end", function () {
gs.close(function (err, gs) {
if (!err) {
console.log(name + " has been stored to database.");
}
});
});
});
});
Does anyone know why it doesn't work?
I think you forgot to pipe the stream into the gridfs file. You can do it directly, since a gridfs file behaves like a stream (see the last line within the db.open callback). Also, you forgot the parentheses on the constructor invocation of ObjectID, and you don't check for any errors when opening either the db or the file. My code looks structurally the same, and it does work. Hope that helps...
// Answerer's version: checks the open() error and pipes the stream into the
// GridFS file (a GridStore behaves like a writable stream).
db.open(function (err, db) {
var stream = ...
var name = 'foobar';
var gs = new mongo.GridStore(db, new mongo.ObjectID(), name, "w");
gs.open(function (err, gs) {
if(err) console.log(...)
else{
// NOTE(review): these manual "data"/"end" handlers AND the pipe() call
// below both feed the same data into gs — keeping both would write each
// chunk twice; one of the two mechanisms should be removed.
stream.on("data", function (data) {
gs.write(data, function (err, gs) {
if (err) {
// ...
}
});
});
stream.on("end", function () {
gs.close(function (err, gs) {
if (!err) {
console.log(name + " has been stored to database.");
}
});
});
stream.pipe(gs);
}
});
});

Resources